repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---
ionitadaniel19/testframeworksevolution | src/tests/utests.py | 1 | 4794 | '''
Created on 01.06.2014
@author: ionitadaniel19
'''
import unittest
import traceback
import os
from config.utilities import load_browser_driver
from selenium.webdriver import FirefoxProfile
from selenium.webdriver import Firefox
from selenium.webdriver import Chrome
from selenium.webdriver import Ie
from config.constants import EXPECTED_ANSWER
import logging,time
class FrameworkTests(unittest.TestCase):
def __init__(self,test,browser_name,url,test_data=None):
super(FrameworkTests,self).__init__(test)
self.test=test
self.browser_name=browser_name
self.url=url
self.driver=None
if self.browser_name=='firefox':
ffp = FirefoxProfile()
ffp.update_preferences()
self.driver = Firefox(firefox_profile=ffp)
elif self.browser_name=='chrome':
chromedriver = load_browser_driver("chromedriver")
os.environ["webdriver.chrome.driver"] = chromedriver
self.driver=Chrome(chromedriver)
elif self.browser_name=='ie':
iedriver = load_browser_driver("IEDriverServer")
os.environ["webdriver.ie.driver"] = iedriver
self.driver=Ie(iedriver)
self.verification = []
self.verification.append("Test %s on browser %s" %(self.test,self.browser_name))
self.test_data=test_data
self.errors=[]
def setUp(self):
"""
set up data used in the tests.
setUp is called before each test function execution.
"""
self.driver.get(self.url)
time.sleep(5)
def tearDown(self):
"""
tearDown is called after each test method has been invoked.
"""
if self.driver:
try:
time.sleep(2)
self.driver.quit()
except:
print traceback.format_exc()
for item in self.verification:
logging.info(item)
for err in self.errors:
logging.error(err)
self.fail(err)
def test_recordplayback(self):
try:
self.verification.append('Test record and playback')
from linearframework.recordtests import show_answer_record
actual_answer=show_answer_record(self.driver)
self.assertEqual(actual_answer, EXPECTED_ANSWER, 'Actual answer incorrect:%s.Expected answer is:%s' %(actual_answer,EXPECTED_ANSWER))
except Exception,ex:
raise Exception('Test record playback failed with Exception:%s' %ex)
def test_modularframework(self):
try:
self.verification.append('Test modular driven framework')
from modularframework.modulartests import show_answer_modular
actual_answer=show_answer_modular(self.driver)
self.assertEqual(actual_answer, EXPECTED_ANSWER, 'Actual answer incorrect:%s.Expected answer is:%s' %(actual_answer,EXPECTED_ANSWER))
except Exception,ex:
raise Exception('Test modular failed with Exception:%s' %ex)
def test_dataframework(self):
try:
self.verification.append('Test data driven framework')
from datadrivenframework.datatests import show_answer_datadriven
actual_answer,expected_answer=show_answer_datadriven(self.driver,2)
self.assertEqual(actual_answer, expected_answer, 'Actual answer incorrect:%s.Expected answer is:%s' %(actual_answer,expected_answer))
except Exception,ex:
raise Exception('Test data driven failed with Exception:%s' %ex)
def test_keywordframework(self):
try:
self.verification.append('Test keyword driven framework')
from keydrivenframework.keytests import show_answer_keydriven
validate,actual_answer=show_answer_keydriven(self.driver,1)
if validate is False:
self.assertTrue(validate, 'Actual answer incorrect:%s'%actual_answer)
except Exception,ex:
raise Exception('Test keyword failed with Exception:%s.Traceback is %s' %(ex,traceback.format_exc()))
def test_hybridframework(self):
try:
self.verification.append('Test hybrid framework')
from hybridframework.hybridtests import show_answer_hybrid_simple
actual_answer=show_answer_hybrid_simple(self.driver,self.test_data)
self.assertEqual(actual_answer, EXPECTED_ANSWER, 'Actual answer incorrect:%s.Expected answer is:%s' %(actual_answer,EXPECTED_ANSWER))
except Exception,ex:
raise Exception('Test hybrid failed with Exception:%s' %ex)
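# Usage sketch (illustrative only; the runner below is an assumption, not part of this file):
#
#   suite = unittest.TestSuite()
#   suite.addTest(FrameworkTests('test_recordplayback', 'firefox', 'http://localhost/app'))
#   unittest.TextTestRunner(verbosity=2).run(suite)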
| mit | 6,501,051,434,895,794,000 | 41.6 | 145 | 0.626617 | false |
jolynch/mit-tab | mittab/apps/tab/views.py | 1 | 14458 | from django.contrib.auth.decorators import permission_required
from django.contrib.auth import logout
from django.conf import settings
from django.http import HttpResponse, JsonResponse, Http404
from django.shortcuts import render, reverse, get_object_or_404
import yaml
from mittab.apps.tab.archive import ArchiveExporter
from mittab.apps.tab.forms import SchoolForm, RoomForm, UploadDataForm, ScratchForm, \
SettingsForm
from mittab.apps.tab.helpers import redirect_and_flash_error, \
redirect_and_flash_success
from mittab.apps.tab.models import *
from mittab.libs import cache_logic
from mittab.libs.tab_logic import TabFlags
from mittab.libs.data_import import import_judges, import_rooms, import_teams, \
import_scratches
def index(request):
number_teams = Team.objects.count()
number_judges = Judge.objects.count()
number_schools = School.objects.count()
number_debaters = Debater.objects.count()
number_rooms = Room.objects.count()
school_list = [(school.pk, school.name) for school in School.objects.all()]
judge_list = [(judge.pk, judge.name) for judge in Judge.objects.all()]
team_list = [(team.pk, team.display_backend) for team in Team.objects.all()]
debater_list = [(debater.pk, debater.display)
for debater in Debater.objects.all()]
room_list = [(room.pk, room.name) for room in Room.objects.all()]
return render(request, "common/index.html", locals())
def tab_logout(request, *args):
logout(request)
return redirect_and_flash_success(request,
"Successfully logged out",
path="/")
def render_403(request, *args, **kwargs):
response = render(request, "common/403.html")
response.status_code = 403
return response
def render_404(request, *args, **kwargs):
response = render(request, "common/404.html")
response.status_code = 404
return response
def render_500(request, *args, **kwargs):
response = render(request, "common/500.html")
response.status_code = 500
return response
#View for manually adding scratches
def add_scratch(request):
if request.method == "POST":
form = ScratchForm(request.POST)
if form.is_valid():
form.save()
return redirect_and_flash_success(request,
"Scratch created successfully")
else:
form = ScratchForm(initial={"scratch_type": 0})
return render(request, "common/data_entry.html", {
"title": "Adding Scratch",
"form": form
})
#### BEGIN SCHOOL ###
#Three views for entering, viewing, and editing schools
def view_schools(request):
#Get a list of (id,school_name) tuples
c_schools = [(s.pk, s.name, 0, "") for s in School.objects.all()]
return render(
request, "common/list_data.html", {
"item_type": "school",
"title": "Viewing All Schools",
"item_list": c_schools
})
def view_school(request, school_id):
school_id = int(school_id)
try:
school = School.objects.get(pk=school_id)
except School.DoesNotExist:
return redirect_and_flash_error(request, "School not found")
if request.method == "POST":
form = SchoolForm(request.POST, instance=school)
if form.is_valid():
try:
form.save()
except ValueError:
return redirect_and_flash_error(
request,
"School name cannot be validated, most likely a non-existent school"
)
return redirect_and_flash_success(
request, "School {} updated successfully".format(
form.cleaned_data["name"]))
else:
form = SchoolForm(instance=school)
links = [("/school/" + str(school_id) + "/delete/", "Delete")]
return render(
request, "common/data_entry.html", {
"form": form,
"links": links,
"title": "Viewing School: %s" % (school.name)
})
def enter_school(request):
if request.method == "POST":
form = SchoolForm(request.POST)
if form.is_valid():
try:
form.save()
except ValueError:
return redirect_and_flash_error(
request,
"School name cannot be validated, most likely a duplicate school"
)
return redirect_and_flash_success(
request,
"School {} created successfully".format(
form.cleaned_data["name"]),
path="/")
else:
form = SchoolForm()
return render(request, "common/data_entry.html", {
"form": form,
"title": "Create School"
})
@permission_required("tab.school.can_delete", login_url="/403/")
def delete_school(request, school_id):
error_msg = None
try:
school_id = int(school_id)
school = School.objects.get(pk=school_id)
school.delete()
except School.DoesNotExist:
error_msg = "That school does not exist"
except Exception as e:
error_msg = str(e)
if error_msg:
return redirect_and_flash_error(request, error_msg)
return redirect_and_flash_success(request,
"School deleted successfully",
path="/")
#### END SCHOOL ###
#### BEGIN ROOM ###
def view_rooms(request):
def flags(room):
result = 0
if room.rank == 0:
result |= TabFlags.ROOM_ZERO_RANK
else:
result |= TabFlags.ROOM_NON_ZERO_RANK
return result
all_flags = [[TabFlags.ROOM_ZERO_RANK, TabFlags.ROOM_NON_ZERO_RANK]]
all_rooms = [(room.pk, room.name, flags(room),
TabFlags.flags_to_symbols(flags(room)))
for room in Room.objects.all()]
filters, symbol_text = TabFlags.get_filters_and_symbols(all_flags)
return render(
request, "common/list_data.html", {
"item_type": "room",
"title": "Viewing All Rooms",
"item_list": all_rooms,
"symbol_text": symbol_text,
"filters": filters
})
def view_room(request, room_id):
room_id = int(room_id)
try:
room = Room.objects.get(pk=room_id)
except Room.DoesNotExist:
return redirect_and_flash_error(request, "Room not found")
if request.method == "POST":
form = RoomForm(request.POST, instance=room)
if form.is_valid():
try:
form.save()
except ValueError:
return redirect_and_flash_error(
request,
"Room name cannot be validated, most likely a non-existent room"
)
return redirect_and_flash_success(
request, "School {} updated successfully".format(
form.cleaned_data["name"]))
else:
form = RoomForm(instance=room)
return render(request, "common/data_entry.html", {
"form": form,
"links": [],
"title": "Viewing Room: %s" % (room.name)
})
def enter_room(request):
if request.method == "POST":
form = RoomForm(request.POST)
if form.is_valid():
try:
form.save()
except ValueError:
return redirect_and_flash_error(
request,
"Room name cannot be validated, most likely a duplicate room"
)
return redirect_and_flash_success(
request,
"Room {} created successfully".format(
form.cleaned_data["name"]),
path="/")
else:
form = RoomForm()
return render(request, "common/data_entry.html", {
"form": form,
"title": "Create Room"
})
def batch_checkin(request):
rooms_and_checkins = []
round_numbers = list([i + 1 for i in range(TabSettings.get("tot_rounds"))])
for room in Room.objects.all():
checkins = []
for round_number in [0] + round_numbers: # 0 is for outrounds
checkins.append(room.is_checked_in_for_round(round_number))
rooms_and_checkins.append((room, checkins))
return render(request, "tab/room_batch_checkin.html", {
"rooms_and_checkins": rooms_and_checkins,
"round_numbers": round_numbers
})
@permission_required("tab.tab_settings.can_change", login_url="/403")
def room_check_in(request, room_id, round_number):
room_id, round_number = int(room_id), int(round_number)
if round_number < 0 or round_number > TabSettings.get("tot_rounds"):
# 0 is so that outrounds don't throw an error
raise Http404("Round does not exist")
room = get_object_or_404(Room, pk=room_id)
if request.method == "POST":
if not room.is_checked_in_for_round(round_number):
check_in = RoomCheckIn(room=room, round_number=round_number)
check_in.save()
elif request.method == "DELETE":
if room.is_checked_in_for_round(round_number):
check_ins = RoomCheckIn.objects.filter(room=room,
round_number=round_number)
check_ins.delete()
else:
raise Http404("Must be POST or DELETE")
return JsonResponse({"success": True})
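# Illustrative usage (a sketch; the exact URL pattern lives in urls.py, which is
# not shown here). Assuming the view is routed at /room/<room_id>/check-in/<round_number>/:
#   curl -X POST http://tab.example/room/3/check-in/1/ checks room 3 in for round 1
#   curl -X DELETE http://tab.example/room/3/check-in/1/ checks it back out
# Any other method raises Http404, and round number 0 is reserved for outrounds.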
@permission_required("tab.scratch.can_delete", login_url="/403/")
def delete_scratch(request, item_id, scratch_id):
try:
scratch_id = int(scratch_id)
scratch = Scratch.objects.get(pk=scratch_id)
scratch.delete()
except Scratch.DoesNotExist:
return redirect_and_flash_error(
request,
"This scratch does not exist, please try again with a valid id.")
return redirect_and_flash_success(request,
"Scratch deleted successfully",
path="/")
def view_scratches(request):
# Get a list of (id,school_name) tuples
c_scratches = [(s.team.pk, str(s), 0, "") for s in Scratch.objects.all()]
return render(
request, "common/list_data.html", {
"item_type": "team",
"title": "Viewing All Scratches for Teams",
"item_list": c_scratches
})
def get_settings_from_yaml():
default_settings = []
with open(settings.SETTING_YAML_PATH, "r") as stream:
default_settings = yaml.safe_load(stream)
to_return = []
for setting in default_settings:
tab_setting = TabSettings.objects.filter(key=setting["name"]).first()
if tab_setting:
if "type" in setting and setting["type"] == "boolean":
setting["value"] = tab_setting.value == 1
else:
setting["value"] = tab_setting.value
to_return.append(setting)
return to_return
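# Illustrative note (an assumption, not taken from this repo): the loop above
# expects SETTING_YAML_PATH to contain a list of entries shaped roughly like
#
#   - name: tot_rounds
#     type: integer
#     value: 5
#   - name: pairing_released
#     type: boolean
#     value: false
#
# where "name" must match a TabSettings key, and a "type" of "boolean" makes the
# stored 0/1 integer round-trip as False/True.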
### SETTINGS VIEWS ###
@permission_required("tab.tab_settings.can_change", login_url="/403/")
def settings_form(request):
yaml_settings = get_settings_from_yaml()
if request.method == "POST":
_settings_form = SettingsForm(request.POST, settings=yaml_settings)
if _settings_form.is_valid():
_settings_form.save()
return redirect_and_flash_success(
request,
"Tab settings updated!",
path=reverse("settings_form")
)
return render( # Allows for proper validation checking
request, "tab/settings_form.html", {
"form": settings_form,
})
_settings_form = SettingsForm(settings=yaml_settings)
return render(
request, "tab/settings_form.html", {
"form": _settings_form,
})
def upload_data(request):
team_info = {"errors": [], "uploaded": False}
judge_info = {"errors": [], "uploaded": False}
room_info = {"errors": [], "uploaded": False}
scratch_info = {"errors": [], "uploaded": False}
if request.method == "POST":
form = UploadDataForm(request.POST, request.FILES)
if form.is_valid():
if "team_file" in request.FILES:
team_info["errors"] = import_teams.import_teams(
request.FILES["team_file"])
team_info["uploaded"] = True
if "judge_file" in request.FILES:
judge_info["errors"] = import_judges.import_judges(
request.FILES["judge_file"])
judge_info["uploaded"] = True
if "room_file" in request.FILES:
room_info["errors"] = import_rooms.import_rooms(
request.FILES["room_file"])
room_info["uploaded"] = True
if "scratch_file" in request.FILES:
scratch_info["errors"] = import_scratches.import_scratches(
request.FILES["scratch_file"])
scratch_info["uploaded"] = True
if not team_info["errors"] + judge_info["errors"] + \
room_info["errors"] + scratch_info["errors"]:
return redirect_and_flash_success(request,
"Data imported successfully")
else:
form = UploadDataForm()
return render(
request, "common/data_upload.html", {
"form": form,
"title": "Upload Input Files",
"team_info": team_info,
"judge_info": judge_info,
"room_info": room_info,
"scratch_info": scratch_info
})
def force_cache_refresh(request):
key = request.GET.get("key", "")
cache_logic.invalidate_cache(key)
redirect_to = request.GET.get("next", "/")
return redirect_and_flash_success(request,
"Refreshed!",
path=redirect_to)
@permission_required("tab.tab_settings.can_change", login_url="/403/")
def generate_archive(request):
tournament_name = request.META["SERVER_NAME"].split(".")[0]
filename = tournament_name + ".xml"
xml = ArchiveExporter(tournament_name).export_tournament()
response = HttpResponse(xml, content_type="text/xml; charset=utf-8")
response["Content-Length"] = len(xml)
response["Content-Disposition"] = "attachment; filename=%s" % filename
return response
| mit | 2,092,619,775,941,231,000 | 33.588517 | 88 | 0.572832 | false |
ymap/aioredis | tests/pool_test.py | 1 | 15568 | import asyncio
import pytest
import async_timeout
from unittest.mock import patch
from aioredis import (
ReplyError,
PoolClosedError,
ConnectionClosedError,
ConnectionsPool,
MaxClientsError,
)
def _assert_defaults(pool):
assert isinstance(pool, ConnectionsPool)
assert pool.minsize == 1
assert pool.maxsize == 10
assert pool.size == 1
assert pool.freesize == 1
assert pool._close_waiter is None
def test_connect(pool):
_assert_defaults(pool)
def test_global_loop(create_pool, loop, server):
asyncio.set_event_loop(loop)
pool = loop.run_until_complete(create_pool(
server.tcp_address))
_assert_defaults(pool)
@pytest.mark.run_loop
async def test_clear(pool):
_assert_defaults(pool)
await pool.clear()
assert pool.freesize == 0
@pytest.mark.run_loop
@pytest.mark.parametrize('minsize', [None, -100, 0.0, 100])
async def test_minsize(minsize, create_pool, loop, server):
with pytest.raises(AssertionError):
await create_pool(
server.tcp_address,
minsize=minsize, maxsize=10, loop=loop)
@pytest.mark.run_loop
@pytest.mark.parametrize('maxsize', [None, -100, 0.0, 1])
async def test_maxsize(maxsize, create_pool, loop, server):
with pytest.raises(AssertionError):
await create_pool(
server.tcp_address,
minsize=2, maxsize=maxsize, loop=loop)
@pytest.mark.run_loop
async def test_create_connection_timeout(create_pool, loop, server):
with patch.object(loop, 'create_connection') as\
open_conn_mock:
open_conn_mock.side_effect = lambda *a, **kw: asyncio.sleep(0.2,
loop=loop)
with pytest.raises(asyncio.TimeoutError):
await create_pool(
server.tcp_address, loop=loop,
create_connection_timeout=0.1)
def test_no_yield_from(pool):
with pytest.raises(RuntimeError):
with pool:
pass # pragma: no cover
@pytest.mark.run_loop
async def test_simple_command(create_pool, loop, server):
pool = await create_pool(
server.tcp_address,
minsize=10, loop=loop)
with (await pool) as conn:
msg = await conn.execute('echo', 'hello')
assert msg == b'hello'
assert pool.size == 10
assert pool.freesize == 9
assert pool.size == 10
assert pool.freesize == 10
@pytest.mark.run_loop
async def test_create_new(create_pool, loop, server):
pool = await create_pool(
server.tcp_address,
minsize=1, loop=loop)
assert pool.size == 1
assert pool.freesize == 1
with (await pool):
assert pool.size == 1
assert pool.freesize == 0
with (await pool):
assert pool.size == 2
assert pool.freesize == 0
assert pool.size == 2
assert pool.freesize == 2
@pytest.mark.run_loop
async def test_create_constraints(create_pool, loop, server):
pool = await create_pool(
server.tcp_address,
minsize=1, maxsize=1, loop=loop)
assert pool.size == 1
assert pool.freesize == 1
with (await pool):
assert pool.size == 1
assert pool.freesize == 0
with pytest.raises(asyncio.TimeoutError):
await asyncio.wait_for(pool.acquire(),
timeout=0.2,
loop=loop)
@pytest.mark.run_loop
async def test_create_no_minsize(create_pool, loop, server):
pool = await create_pool(
server.tcp_address,
minsize=0, maxsize=1, loop=loop)
assert pool.size == 0
assert pool.freesize == 0
with (await pool):
assert pool.size == 1
assert pool.freesize == 0
with pytest.raises(asyncio.TimeoutError):
await asyncio.wait_for(pool.acquire(),
timeout=0.2,
loop=loop)
assert pool.size == 1
assert pool.freesize == 1
@pytest.mark.run_loop
async def test_create_pool_cls(create_pool, loop, server):
class MyPool(ConnectionsPool):
pass
pool = await create_pool(
server.tcp_address,
loop=loop,
pool_cls=MyPool)
assert isinstance(pool, MyPool)
@pytest.mark.run_loop
async def test_create_pool_cls_invalid(create_pool, loop, server):
with pytest.raises(AssertionError):
await create_pool(
server.tcp_address,
loop=loop,
pool_cls=type)
@pytest.mark.run_loop
async def test_release_closed(create_pool, loop, server):
pool = await create_pool(
server.tcp_address,
minsize=1, loop=loop)
assert pool.size == 1
assert pool.freesize == 1
with (await pool) as conn:
conn.close()
await conn.wait_closed()
assert pool.size == 0
assert pool.freesize == 0
@pytest.mark.run_loop
async def test_release_pending(create_pool, loop, server):
pool = await create_pool(
server.tcp_address,
minsize=1, loop=loop)
assert pool.size == 1
assert pool.freesize == 1
with pytest.logs('aioredis', 'WARNING') as cm:
with (await pool) as conn:
try:
await asyncio.wait_for(
conn.execute(
b'blpop',
b'somekey:not:exists',
b'0'),
0.1,
loop=loop)
except asyncio.TimeoutError:
pass
assert pool.size == 0
assert pool.freesize == 0
assert cm.output == [
'WARNING:aioredis:Connection <RedisConnection [db:0]>'
' has pending commands, closing it.'
]
@pytest.mark.run_loop
async def test_release_bad_connection(create_pool, create_redis, loop, server):
pool = await create_pool(
server.tcp_address,
loop=loop)
conn = await pool.acquire()
assert conn.address[0] in ('127.0.0.1', '::1')
assert conn.address[1] == server.tcp_address.port
other_conn = await create_redis(
server.tcp_address,
loop=loop)
with pytest.raises(AssertionError):
pool.release(other_conn)
pool.release(conn)
other_conn.close()
await other_conn.wait_closed()
@pytest.mark.run_loop
async def test_select_db(create_pool, loop, server):
pool = await create_pool(
server.tcp_address,
loop=loop)
await pool.select(1)
with (await pool) as conn:
assert conn.db == 1
@pytest.mark.run_loop
async def test_change_db(create_pool, loop, server):
pool = await create_pool(
server.tcp_address,
minsize=1, db=0,
loop=loop)
assert pool.size == 1
assert pool.freesize == 1
with (await pool) as conn:
await conn.select(1)
assert pool.size == 0
assert pool.freesize == 0
with (await pool):
assert pool.size == 1
assert pool.freesize == 0
await pool.select(1)
assert pool.db == 1
assert pool.size == 1
assert pool.freesize == 0
assert pool.size == 0
assert pool.freesize == 0
assert pool.db == 1
@pytest.mark.run_loop
async def test_change_db_errors(create_pool, loop, server):
pool = await create_pool(
server.tcp_address,
minsize=1, db=0,
loop=loop)
with pytest.raises(TypeError):
await pool.select(None)
assert pool.db == 0
with (await pool):
pass
assert pool.size == 1
assert pool.freesize == 1
with pytest.raises(TypeError):
await pool.select(None)
assert pool.db == 0
with pytest.raises(ValueError):
await pool.select(-1)
assert pool.db == 0
with pytest.raises(ReplyError):
await pool.select(100000)
assert pool.db == 0
@pytest.mark.xfail(reason="Need to refactor this test")
@pytest.mark.run_loop
async def test_select_and_create(create_pool, loop, server):
# trying to model the situation where select and acquire are
# called simultaneously: acquire used to freeze on _wait_select
# and then continue with the proper db.
# TODO: refactor this test as there's no _wait_select any more.
with async_timeout.timeout(10, loop=loop):
pool = await create_pool(
server.tcp_address,
minsize=1, db=0,
loop=loop)
db = 0
while True:
db = (db + 1) & 1
_, conn = await asyncio.gather(pool.select(db),
pool.acquire(),
loop=loop)
assert pool.db == db
pool.release(conn)
if conn.db == db:
break
# await asyncio.wait_for(test(), 3, loop=loop)
@pytest.mark.run_loop
async def test_response_decoding(create_pool, loop, server):
pool = await create_pool(
server.tcp_address,
encoding='utf-8', loop=loop)
assert pool.encoding == 'utf-8'
with (await pool) as conn:
await conn.execute('set', 'key', 'value')
with (await pool) as conn:
res = await conn.execute('get', 'key')
assert res == 'value'
@pytest.mark.run_loop
async def test_hgetall_response_decoding(create_pool, loop, server):
pool = await create_pool(
server.tcp_address,
encoding='utf-8', loop=loop)
assert pool.encoding == 'utf-8'
with (await pool) as conn:
await conn.execute('del', 'key1')
await conn.execute('hmset', 'key1', 'foo', 'bar')
await conn.execute('hmset', 'key1', 'baz', 'zap')
with (await pool) as conn:
res = await conn.execute('hgetall', 'key1')
assert res == ['foo', 'bar', 'baz', 'zap']
@pytest.mark.run_loop
async def test_crappy_multiexec(create_pool, loop, server):
pool = await create_pool(
server.tcp_address,
encoding='utf-8', loop=loop,
minsize=1, maxsize=1)
with (await pool) as conn:
await conn.execute('set', 'abc', 'def')
await conn.execute('multi')
await conn.execute('set', 'abc', 'fgh')
assert conn.closed is True
with (await pool) as conn:
value = await conn.execute('get', 'abc')
assert value == 'def'
@pytest.mark.run_loop
async def test_pool_size_growth(create_pool, server, loop):
pool = await create_pool(
server.tcp_address,
loop=loop,
minsize=1, maxsize=1)
done = set()
tasks = []
async def task1(i):
with (await pool):
assert pool.size <= pool.maxsize
assert pool.freesize == 0
await asyncio.sleep(0.2, loop=loop)
done.add(i)
async def task2():
with (await pool):
assert pool.size <= pool.maxsize
assert pool.freesize >= 0
assert done == {0, 1}
for _ in range(2):
tasks.append(asyncio.ensure_future(task1(_), loop=loop))
tasks.append(asyncio.ensure_future(task2(), loop=loop))
await asyncio.gather(*tasks, loop=loop)
@pytest.mark.run_loop
async def test_pool_with_closed_connections(create_pool, server, loop):
pool = await create_pool(
server.tcp_address,
loop=loop,
minsize=1, maxsize=2)
assert 1 == pool.freesize
conn1 = pool._pool[0]
conn1.close()
assert conn1.closed is True
assert 1 == pool.freesize
with (await pool) as conn2:
assert conn2.closed is False
assert conn1 is not conn2
@pytest.mark.run_loop
async def test_pool_close(create_pool, server, loop):
pool = await create_pool(
server.tcp_address, loop=loop)
assert pool.closed is False
with (await pool) as conn:
assert (await conn.execute('ping')) == b'PONG'
pool.close()
await pool.wait_closed()
assert pool.closed is True
with pytest.raises(PoolClosedError):
with (await pool) as conn:
assert (await conn.execute('ping')) == b'PONG'
@pytest.mark.run_loop
async def test_pool_close__used(create_pool, server, loop):
pool = await create_pool(
server.tcp_address, loop=loop)
assert pool.closed is False
with (await pool) as conn:
pool.close()
await pool.wait_closed()
assert pool.closed is True
with pytest.raises(ConnectionClosedError):
await conn.execute('ping')
@pytest.mark.run_loop
@pytest.redis_version(2, 8, 0, reason="maxclients config setting")
async def test_pool_check_closed_when_exception(
create_pool, create_redis, start_server, loop):
server = start_server('server-small')
redis = await create_redis(server.tcp_address, loop=loop)
await redis.config_set('maxclients', 2)
errors = (MaxClientsError, ConnectionClosedError, ConnectionError)
with pytest.logs('aioredis', 'DEBUG') as cm:
with pytest.raises(errors):
await create_pool(address=tuple(server.tcp_address),
minsize=3, loop=loop)
assert len(cm.output) >= 3
connect_msg = (
"DEBUG:aioredis:Creating tcp connection"
" to ('localhost', {})".format(server.tcp_address.port))
assert cm.output[:2] == [connect_msg, connect_msg]
assert cm.output[-1] == "DEBUG:aioredis:Closed 1 connection(s)"
@pytest.mark.run_loop
async def test_pool_get_connection(create_pool, server, loop):
pool = await create_pool(server.tcp_address, minsize=1, maxsize=2,
loop=loop)
res = await pool.execute("set", "key", "val")
assert res == b'OK'
res = await pool.execute_pubsub("subscribe", "channel:1")
assert res == [[b"subscribe", b"channel:1", 1]]
res = await pool.execute("getset", "key", "value")
assert res == b'val'
res = await pool.execute_pubsub("subscribe", "channel:2")
assert res == [[b"subscribe", b"channel:2", 2]]
res = await pool.execute("get", "key")
assert res == b'value'
@pytest.mark.run_loop
async def test_pool_get_connection_with_pipelining(create_pool, server, loop):
pool = await create_pool(server.tcp_address, minsize=1, maxsize=2,
loop=loop)
fut1 = pool.execute('set', 'key', 'val')
fut2 = pool.execute_pubsub("subscribe", "channel:1")
fut3 = pool.execute('getset', 'key', 'next')
fut4 = pool.execute_pubsub("subscribe", "channel:2")
fut5 = pool.execute('get', 'key')
res = await fut1
assert res == b'OK'
res = await fut2
assert res == [[b"subscribe", b"channel:1", 1]]
res = await fut3
assert res == b'val'
res = await fut4
assert res == [[b"subscribe", b"channel:2", 2]]
res = await fut5
assert res == b'next'
@pytest.mark.run_loop
async def test_pool_idle_close(create_pool, start_server, loop):
server = start_server('idle')
conn = await create_pool(server.tcp_address, minsize=2, loop=loop)
ok = await conn.execute("config", "set", "timeout", 1)
assert ok == b'OK'
await asyncio.sleep(2, loop=loop)
assert (await conn.execute('ping')) == b'PONG'
@pytest.mark.run_loop
async def test_await(create_pool, server, loop):
pool = await create_pool(
server.tcp_address,
minsize=10, loop=loop)
with await pool as conn:
msg = await conn.execute('echo', 'hello')
assert msg == b'hello'
@pytest.mark.run_loop
async def test_async_with(create_pool, server, loop):
pool = await create_pool(
server.tcp_address,
minsize=10, loop=loop)
async with pool.get() as conn:
msg = await conn.execute('echo', 'hello')
assert msg == b'hello'
| mit | -4,112,878,076,831,393,000 | 27.101083 | 79 | 0.601747 | false |
TwolDE2/enigma2 | lib/python/Components/config.py | 2 | 57619 | from enigma import getPrevAsciiCode
from Tools.NumericalTextInput import NumericalTextInput
from Tools.Directories import resolveFilename, SCOPE_CONFIG, fileExists
from Components.Harddisk import harddiskmanager
from copy import copy as copy_copy
from os import path as os_path
from time import localtime, strftime
# ConfigElement, the base class of all ConfigElements.
# it stores:
# value the current value, usefully encoded.
# usually a property which retrieves _value,
# and maybe does some reformatting
# _value the value as it's going to be saved in the configfile,
# though still in non-string form.
# this is the object which is actually worked on.
# default the initial value. If _value is equal to default,
# it will not be stored in the config file
# saved_value is a text representation of _value, stored in the config file
#
# and has (at least) the following methods:
# save() stores _value into saved_value,
# (or stores 'None' if it should not be stored)
# load() loads _value from saved_value, or loads
# the default if saved_value is 'None' (default)
# or invalid.
#
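# A minimal sketch of that lifecycle (illustrative only, using the ConfigYesNo
# subclass defined further down; nothing here is executed):
#
#   item = ConfigYesNo(default=False)
#   item.load()          # saved_value is None -> value falls back to default
#   item.value = True
#   item.save()          # value != default -> saved_value becomes "true"
#   item.cancel()        # reloads value from saved_value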
class ConfigElement(object):
def __init__(self):
self.extra_args = {}
self.saved_value = None
self.save_forced = False
self.last_value = None
self.save_disabled = False
self.__notifiers = { }
self.__notifiers_final = { }
self.enabled = True
self.callNotifiersOnSaveAndCancel = False
def getNotifiers(self):
return [func for (func, val, call_on_save_and_cancel) in self.__notifiers.itervalues()]
def setNotifiers(self, val):
print "just readonly access to notifiers is allowed! append/remove doesnt work anymore! please use addNotifier, removeNotifier, clearNotifiers"
notifiers = property(getNotifiers, setNotifiers)
def getNotifiersFinal(self):
return [func for (func, val, call_on_save_and_cancel) in self.__notifiers_final.itervalues()]
def setNotifiersFinal(self, val):
print "just readonly access to notifiers_final is allowed! append/remove doesnt work anymore! please use addNotifier, removeNotifier, clearNotifiers"
notifiers_final = property(getNotifiersFinal, setNotifiersFinal)
# you need to override this to do input validation
def setValue(self, value):
self._value = value
self.changed()
def getValue(self):
return self._value
value = property(getValue, setValue)
# you need to override this if self.value is not a string
def fromstring(self, value):
return value
# you can override this for fancy default handling
def load(self):
sv = self.saved_value
if sv is None:
self.value = self.default
else:
self.value = self.fromstring(sv)
def tostring(self, value):
return str(value)
# you need to override this if str(self.value) doesn't work
def save(self):
if self.save_disabled or (self.value == self.default and not self.save_forced):
self.saved_value = None
else:
self.saved_value = self.tostring(self.value)
if self.callNotifiersOnSaveAndCancel:
self.changed()
def cancel(self):
self.load()
if self.callNotifiersOnSaveAndCancel:
self.changed()
def isChanged(self):
sv = self.saved_value
if sv is None and self.value == self.default:
return False
return self.tostring(self.value) != sv
def changed(self):
if self.__notifiers:
for x in self.notifiers:
try:
if self.extra_args and self.extra_args[x]:
x(self, self.extra_args[x])
else:
x(self)
except:
x(self)
def changedFinal(self):
if self.__notifiers_final:
for x in self.notifiers_final:
try:
if self.extra_args and self.extra_args[x]:
x(self, self.extra_args[x])
else:
x(self)
except:
x(self)
# immediate_feedback = True means call the notifier on every value change
# immediate_feedback = False means call the notifier on leaving the config element (up/down) if the value has changed
# call_on_save_or_cancel = True means always call the notifier on save/cancel, even when the value has not changed
def addNotifier(self, notifier, initial_call = True, immediate_feedback = True, call_on_save_or_cancel = False, extra_args=None):
if not extra_args: extra_args = []
assert callable(notifier), "notifiers must be callable"
try:
self.extra_args[notifier] = extra_args
except: pass
if immediate_feedback:
self.__notifiers[str(notifier)] = (notifier, self.value, call_on_save_or_cancel)
else:
self.__notifiers_final[str(notifier)] = (notifier, self.value, call_on_save_or_cancel)
# CHECKME:
# do we want to call the notifier
# - at all when adding it? (yes, though optional)
# - when the default is active? (yes)
# - when no value *yet* has been set,
# because no config has ever been read (currently yes)
# (though that's not so easy to detect.
# the entry could just be new.)
if initial_call:
if extra_args:
notifier(self,extra_args)
else:
notifier(self)
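# Example wiring (illustrative only; the callback and the config path are made up):
#
#   def on_changed(cfg):
#       print "new value:", cfg.value
#   config.misc.something.addNotifier(on_changed, initial_call=False)
#
# With immediate_feedback=True (the default) on_changed fires on every value
# change; with immediate_feedback=False it fires once via changedFinal() when
# the element is deselected and the value differs from last_value.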
def removeNotifier(self, notifier):
try:
del self.__notifiers[str(notifier)]
except:
try:
del self.__notifiers_final[str(notifier)]
except:
pass
def clearNotifiers(self):
self.__notifiers = { }
self.__notifiers_final = { }
def disableSave(self):
self.save_disabled = True
def __call__(self, selected):
return self.getMulti(selected)
def onSelect(self, session):
pass
def onDeselect(self, session):
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
KEY_LEFT = 0
KEY_RIGHT = 1
KEY_OK = 2
KEY_DELETE = 3
KEY_BACKSPACE = 4
KEY_HOME = 5
KEY_END = 6
KEY_TOGGLEOW = 7
KEY_ASCII = 8
KEY_TIMEOUT = 9
KEY_NUMBERS = range(12, 12+10)
KEY_0 = 12
KEY_9 = 12+9
def getKeyNumber(key):
assert key in KEY_NUMBERS
return key - KEY_0
class choicesList(object): # XXX: we might want a better name for this
LIST_TYPE_LIST = 1
LIST_TYPE_DICT = 2
def __init__(self, choices, type = None):
self.choices = choices
if type is None:
if isinstance(choices, list):
self.type = choicesList.LIST_TYPE_LIST
elif isinstance(choices, dict):
self.type = choicesList.LIST_TYPE_DICT
else:
assert False, "choices must be dict or list!"
else:
self.type = type
def __list__(self):
if self.type == choicesList.LIST_TYPE_LIST:
ret = [not isinstance(x, tuple) and x or x[0] for x in self.choices]
else:
ret = self.choices.keys()
return ret or [""]
def __iter__(self):
if self.type == choicesList.LIST_TYPE_LIST:
ret = [not isinstance(x, tuple) and x or x[0] for x in self.choices]
else:
ret = self.choices
return iter(ret or [""])
def __len__(self):
return len(self.choices) or 1
def updateItemDescription(self, index, descr):
if self.type == choicesList.LIST_TYPE_LIST:
orig = self.choices[index]
assert isinstance(orig, tuple)
self.choices[index] = (orig[0], descr)
else:
key = self.choices.keys()[index]
self.choices[key] = descr
def __getitem__(self, index):
if self.type == choicesList.LIST_TYPE_LIST:
ret = self.choices[index]
if isinstance(ret, tuple):
ret = ret[0]
return ret
return self.choices.keys()[index]
def index(self, value):
try:
return self.__list__().index(value)
except (ValueError, IndexError):
# occurs e.g. when default is not in list
return 0
def __setitem__(self, index, value):
if self.type == choicesList.LIST_TYPE_LIST:
orig = self.choices[index]
if isinstance(orig, tuple):
self.choices[index] = (value, orig[1])
else:
self.choices[index] = value
else:
key = self.choices.keys()[index]
orig = self.choices[key]
del self.choices[key]
self.choices[value] = orig
def default(self):
choices = self.choices
if not choices:
return ""
if self.type is choicesList.LIST_TYPE_LIST:
default = choices[0]
if isinstance(default, tuple):
default = default[0]
else:
default = choices.keys()[0]
return default
class descriptionList(choicesList): # XXX: we might want a better name for this
def __list__(self):
if self.type == choicesList.LIST_TYPE_LIST:
ret = [not isinstance(x, tuple) and x or x[1] for x in self.choices]
else:
ret = self.choices.values()
return ret or [""]
def __iter__(self):
return iter(self.__list__())
def __getitem__(self, index):
if self.type == choicesList.LIST_TYPE_LIST:
for x in self.choices:
if isinstance(x, tuple):
if x[0] == index:
return str(x[1])
elif x == index:
return str(x)
return str(index) # Fallback!
else:
return str(self.choices.get(index, ""))
def __setitem__(self, index, value):
if self.type == choicesList.LIST_TYPE_LIST:
i = self.index(index)
orig = self.choices[i]
if isinstance(orig, tuple):
self.choices[i] = (orig[0], value)
else:
self.choices[i] = value
else:
self.choices[index] = value
#
# ConfigSelection is a "one of.."-type.
# it has the "choices", usually a list, which contains
# (id, desc)-tuples (or just only the ids, in case the id
# will be used as description)
#
# all ids MUST be plain strings.
#
class ConfigSelection(ConfigElement):
def __init__(self, choices, default = None):
ConfigElement.__init__(self)
self.choices = choicesList(choices)
if default is None:
default = self.choices.default()
self._descr = None
self.default = self._value = self.last_value = default
def setChoices(self, choices, default = None):
self.choices = choicesList(choices)
if default is None:
default = self.choices.default()
self.default = default
if self.value not in self.choices:
self.value = default
def setValue(self, value):
if value in self.choices:
self._value = value
else:
self._value = self.default
self._descr = None
self.changed()
def tostring(self, val):
return val
def getValue(self):
return self._value
def setCurrentText(self, text):
i = self.choices.index(self.value)
self.choices[i] = text
self._descr = self.description[text] = text
self._value = text
value = property(getValue, setValue)
def getIndex(self):
return self.choices.index(self.value)
index = property(getIndex)
# GUI
def handleKey(self, key):
nchoices = len(self.choices)
if nchoices > 1:
i = self.choices.index(self.value)
if key == KEY_LEFT:
self.value = self.choices[(i + nchoices - 1) % nchoices]
elif key == KEY_RIGHT:
self.value = self.choices[(i + 1) % nchoices]
elif key == KEY_HOME:
self.value = self.choices[0]
elif key == KEY_END:
self.value = self.choices[nchoices - 1]
def selectNext(self):
nchoices = len(self.choices)
i = self.choices.index(self.value)
self.value = self.choices[(i + 1) % nchoices]
def getText(self):
if self._descr is None:
self._descr = self.description[self.value]
return self._descr
def getMulti(self, selected):
if self._descr is None:
self._descr = self.description[self.value]
return ("text", self._descr)
# HTML
def getHTML(self, id):
res = ""
for v in self.choices:
descr = self.description[v]
if self.value == v:
checked = 'checked="checked" '
else:
checked = ''
res += '<input type="radio" name="' + id + '" ' + checked + 'value="' + v + '">' + descr + "</input></br>\n"
return res
def unsafeAssign(self, value):
# setValue does check if value is in choices. This is safe enough.
self.value = value
description = property(lambda self: descriptionList(self.choices.choices, self.choices.type))
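# Usage sketch for ConfigSelection (illustrative only, not executed):
#
#   mode = ConfigSelection(choices=[("auto", _("Automatic")), ("manual", _("Manual"))], default="auto")
#   mode.value          # -> "auto"
#   mode.getText()      # -> "Automatic" (the description of the current id)
#   mode.handleKey(KEY_RIGHT)
#   mode.value          # -> "manual"
#
# A plain list of strings works too; each id then doubles as its own description.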
# a binary decision.
#
# several customized versions exist for different
# descriptions.
#
class ConfigBoolean(ConfigElement):
def __init__(self, default = False, descriptions = {False: _("false"), True: _("true")}):
ConfigElement.__init__(self)
self.descriptions = descriptions
self.value = self.last_value = self.default = default
def handleKey(self, key):
if key in (KEY_LEFT, KEY_RIGHT):
self.value = not self.value
elif key == KEY_HOME:
self.value = False
elif key == KEY_END:
self.value = True
def getText(self):
return self.descriptions[self.value]
def getMulti(self, selected):
return ("text", self.descriptions[self.value])
def tostring(self, value):
if not value:
return "false"
else:
return "true"
def fromstring(self, val):
if val == "true":
return True
else:
return False
def getHTML(self, id):
if self.value:
checked = ' checked="checked"'
else:
checked = ''
return '<input type="checkbox" name="' + id + '" value="1" ' + checked + " />"
# this is FLAWED and must be fixed.
def unsafeAssign(self, value):
if value == "1":
self.value = True
else:
self.value = False
def onDeselect(self, session):
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
class ConfigYesNo(ConfigBoolean):
def __init__(self, default = False):
ConfigBoolean.__init__(self, default = default, descriptions = {False: _("no"), True: _("yes")})
class ConfigOnOff(ConfigBoolean):
def __init__(self, default = False):
ConfigBoolean.__init__(self, default = default, descriptions = {False: _("off"), True: _("on")})
class ConfigEnableDisable(ConfigBoolean):
def __init__(self, default = False):
ConfigBoolean.__init__(self, default = default, descriptions = {False: _("disable"), True: _("enable")})
class ConfigDateTime(ConfigElement):
def __init__(self, default, formatstring, increment = 86400):
ConfigElement.__init__(self)
self.increment = increment
self.formatstring = formatstring
self.value = self.last_value = self.default = int(default)
def handleKey(self, key):
if key == KEY_LEFT:
self.value -= self.increment
elif key == KEY_RIGHT:
self.value += self.increment
elif key == KEY_HOME or key == KEY_END:
self.value = self.default
def getText(self):
return strftime(self.formatstring, localtime(self.value))
def getMulti(self, selected):
return "text", strftime(self.formatstring, localtime(self.value))
def fromstring(self, val):
return int(val)
# *THE* mighty config element class
#
# allows you to store/edit a sequence of values.
# can be used for IP-addresses, dates, plain integers, ...
# several helper exist to ease this up a bit.
#
class ConfigSequence(ConfigElement):
def __init__(self, seperator, limits, default, censor_char = ""):
ConfigElement.__init__(self)
assert isinstance(limits, list) and len(limits[0]) == 2, "limits must be [(min, max),...]-tuple-list"
assert censor_char == "" or len(censor_char) == 1, "censor char must be a single char (or \"\")"
#assert isinstance(default, list), "default must be a list"
#assert isinstance(default[0], int), "list must contain numbers"
#assert len(default) == len(limits), "length must match"
self.marked_pos = 0
self.seperator = seperator
self.limits = limits
self.censor_char = censor_char
self.last_value = self.default = default
self.value = copy_copy(default)
self.endNotifier = None
def validate(self):
max_pos = 0
num = 0
for i in self._value:
max_pos += len(str(self.limits[num][1]))
if self._value[num] < self.limits[num][0]:
self._value[num] = self.limits[num][0]
if self._value[num] > self.limits[num][1]:
self._value[num] = self.limits[num][1]
num += 1
if self.marked_pos >= max_pos:
if self.endNotifier:
for x in self.endNotifier:
x(self)
self.marked_pos = max_pos - 1
if self.marked_pos < 0:
self.marked_pos = 0
def validatePos(self):
if self.marked_pos < 0:
self.marked_pos = 0
total_len = sum([len(str(x[1])) for x in self.limits])
if self.marked_pos >= total_len:
self.marked_pos = total_len - 1
def addEndNotifier(self, notifier):
if self.endNotifier is None:
self.endNotifier = []
self.endNotifier.append(notifier)
def handleKey(self, key):
if key == KEY_LEFT:
self.marked_pos -= 1
self.validatePos()
elif key == KEY_RIGHT:
self.marked_pos += 1
self.validatePos()
elif key == KEY_HOME:
self.marked_pos = 0
self.validatePos()
elif key == KEY_END:
max_pos = 0
num = 0
for i in self._value:
max_pos += len(str(self.limits[num][1]))
num += 1
self.marked_pos = max_pos - 1
self.validatePos()
elif key in KEY_NUMBERS or key == KEY_ASCII:
if key == KEY_ASCII:
code = getPrevAsciiCode()
if code < 48 or code > 57:
return
number = code - 48
else:
number = getKeyNumber(key)
block_len = [len(str(x[1])) for x in self.limits]
total_len = sum(block_len)
pos = 0
blocknumber = 0
block_len_total = [0, ]
for x in block_len:
pos += block_len[blocknumber]
block_len_total.append(pos)
if pos - 1 >= self.marked_pos:
pass
else:
blocknumber += 1
# length of numberblock
number_len = len(str(self.limits[blocknumber][1]))
# position in the block
posinblock = self.marked_pos - block_len_total[blocknumber]
oldvalue = abs(self._value[blocknumber]) # use abs() so negative values (e.g. a default of -1) can still be edited
olddec = oldvalue % 10 ** (number_len - posinblock) - (oldvalue % 10 ** (number_len - posinblock - 1))
newvalue = oldvalue - olddec + (10 ** (number_len - posinblock - 1) * number)
self._value[blocknumber] = newvalue
self.marked_pos += 1
self.validate()
self.changed()
def genText(self):
value = ""
mPos = self.marked_pos
num = 0
for i in self._value:
if value: #fixme no leading separator possible
value += self.seperator
if mPos >= len(value) - 1:
mPos += 1
if self.censor_char == "":
value += ("%0" + str(len(str(self.limits[num][1]))) + "d") % i
else:
value += (self.censor_char * len(str(self.limits[num][1])))
num += 1
return value, mPos
def getText(self):
(value, mPos) = self.genText()
return value
def getMulti(self, selected):
(value, mPos) = self.genText()
# only mark cursor when we are selected
# (this code is heavily ink optimized!)
if self.enabled:
return "mtext"[1-selected:], value, [mPos]
else:
return "text", value
def tostring(self, val):
return self.seperator.join([self.saveSingle(x) for x in val])
def saveSingle(self, v):
return str(v)
def fromstring(self, value):
try:
return [int(x) for x in value.split(self.seperator)]
except:
return self.default
def onDeselect(self, session):
if self.last_value != self._value:
self.changedFinal()
self.last_value = copy_copy(self._value)
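# Usage sketch for ConfigSequence (illustrative only, not executed): a
# day.month pair, edited digit by digit with the numeric keys:
#
#   date = ConfigSequence(seperator=".", limits=[(1, 31), (1, 12)], default=[24, 12])
#   date.getText()              # -> "24.12" (each block zero-padded to its max width)
#   date.handleKey(KEY_0 + 3)   # types a "3" at the marked position
#
# ConfigIP, ConfigMAC and ConfigClock below are thin wrappers that pick
# suitable separators and limits.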
ip_limits = [(0,255),(0,255),(0,255),(0,255)]
class ConfigIP(ConfigSequence):
def __init__(self, default, auto_jump = False):
ConfigSequence.__init__(self, seperator = ".", limits = ip_limits, default = default)
self.block_len = [len(str(x[1])) for x in self.limits]
self.marked_block = 0
self.overwrite = True
self.auto_jump = auto_jump
def handleKey(self, key):
if key == KEY_LEFT:
if self.marked_block > 0:
self.marked_block -= 1
self.overwrite = True
elif key == KEY_RIGHT:
if self.marked_block < len(self.limits)-1:
self.marked_block += 1
self.overwrite = True
elif key == KEY_HOME:
self.marked_block = 0
self.overwrite = True
elif key == KEY_END:
self.marked_block = len(self.limits)-1
self.overwrite = True
elif key in KEY_NUMBERS or key == KEY_ASCII:
if key == KEY_ASCII:
code = getPrevAsciiCode()
if code < 48 or code > 57:
return
number = code - 48
else:
number = getKeyNumber(key)
oldvalue = self._value[self.marked_block]
if self.overwrite:
self._value[self.marked_block] = number
self.overwrite = False
else:
oldvalue *= 10
newvalue = oldvalue + number
if self.auto_jump and newvalue > self.limits[self.marked_block][1] and self.marked_block < len(self.limits)-1:
self.handleKey(KEY_RIGHT)
self.handleKey(key)
return
else:
self._value[self.marked_block] = newvalue
if len(str(self._value[self.marked_block])) >= self.block_len[self.marked_block]:
self.handleKey(KEY_RIGHT)
self.validate()
self.changed()
def genText(self):
value = ""
block_strlen = []
for i in self._value:
block_strlen.append(len(str(i)))
if value:
value += self.seperator
value += str(i)
leftPos = sum(block_strlen[:self.marked_block])+self.marked_block
rightPos = sum(block_strlen[:(self.marked_block+1)])+self.marked_block
mBlock = range(leftPos, rightPos)
return value, mBlock
def getMulti(self, selected):
(value, mBlock) = self.genText()
if self.enabled:
return "mtext"[1-selected:], value, mBlock
else:
return "text", value
def getHTML(self, id):
# we definitely don't want leading zeros
return '.'.join(["%d" % d for d in self.value])
mac_limits = [(1,255),(1,255),(1,255),(1,255),(1,255),(1,255)]
class ConfigMAC(ConfigSequence):
def __init__(self, default):
ConfigSequence.__init__(self, seperator = ":", limits = mac_limits, default = default)
class ConfigMacText(ConfigElement, NumericalTextInput):
def __init__(self, default = "", visible_width = False):
ConfigElement.__init__(self)
NumericalTextInput.__init__(self, nextFunc = self.nextFunc, handleTimeout = False)
self.marked_pos = 0
self.allmarked = (default != "")
self.fixed_size = 17
self.visible_width = visible_width
self.offset = 0
self.overwrite = 17
self.help_window = None
self.value = self.last_value = self.default = default
self.useableChars = '0123456789ABCDEF'
def validateMarker(self):
textlen = len(self.text)
if self.marked_pos > textlen-1:
self.marked_pos = textlen-1
elif self.marked_pos < 0:
self.marked_pos = 0
def insertChar(self, ch, pos, owr):
if self.text[pos] == ':':
pos += 1
if owr or self.overwrite:
self.text = self.text[0:pos] + ch + self.text[pos + 1:]
elif self.fixed_size:
self.text = self.text[0:pos] + ch + self.text[pos:-1]
else:
self.text = self.text[0:pos] + ch + self.text[pos:]
def handleKey(self, key):
if key == KEY_LEFT:
self.timeout()
if self.allmarked:
self.marked_pos = len(self.text)
self.allmarked = False
else:
if self.text[self.marked_pos-1] == ':':
self.marked_pos -= 2
else:
self.marked_pos -= 1
elif key == KEY_RIGHT:
self.timeout()
if self.allmarked:
self.marked_pos = 0
self.allmarked = False
else:
if self.marked_pos < (len(self.text)-1):
if self.text[self.marked_pos+1] == ':':
self.marked_pos += 2
else:
self.marked_pos += 1
elif key in KEY_NUMBERS:
owr = self.lastKey == getKeyNumber(key)
newChar = self.getKey(getKeyNumber(key))
self.insertChar(newChar, self.marked_pos, owr)
elif key == KEY_TIMEOUT:
self.timeout()
if self.help_window:
self.help_window.update(self)
if self.text[self.marked_pos] == ':':
self.marked_pos += 1
return
if self.help_window:
self.help_window.update(self)
self.validateMarker()
self.changed()
def nextFunc(self):
self.marked_pos += 1
self.validateMarker()
self.changed()
def getValue(self):
try:
return self.text.encode("utf-8")
except UnicodeDecodeError:
print "Broken UTF8!"
return self.text
def setValue(self, val):
try:
self.text = val.decode("utf-8")
except UnicodeDecodeError:
self.text = val.decode("utf-8", "ignore")
print "Broken UTF8!"
value = property(getValue, setValue)
_value = property(getValue, setValue)
def getText(self):
return self.text.encode("utf-8")
def getMulti(self, selected):
if self.visible_width:
if self.allmarked:
mark = range(0, min(self.visible_width, len(self.text)))
else:
mark = [self.marked_pos-self.offset]
return "mtext"[1-selected:], self.text[self.offset:self.offset+self.visible_width].encode("utf-8")+" ", mark
else:
if self.allmarked:
mark = range(0, len(self.text))
else:
mark = [self.marked_pos]
return "mtext"[1-selected:], self.text.encode("utf-8")+" ", mark
def onSelect(self, session):
self.allmarked = (self.value != "")
if session is not None:
from Screens.NumericalTextInputHelpDialog import NumericalTextInputHelpDialog
self.help_window = session.instantiateDialog(NumericalTextInputHelpDialog, self)
self.help_window.setAnimationMode(0)
self.help_window.show()
def onDeselect(self, session):
self.marked_pos = 0
self.offset = 0
if self.help_window:
session.deleteDialog(self.help_window)
self.help_window = None
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
def getHTML(self, id):
return '<input type="text" name="' + id + '" value="' + self.value + '" /><br>\n'
def unsafeAssign(self, value):
self.value = str(value)
class ConfigPosition(ConfigSequence):
def __init__(self, default, args):
ConfigSequence.__init__(self, seperator = ",", limits = [(0,args[0]),(0,args[1]),(0,args[2]),(0,args[3])], default = default)
clock_limits = [(0,23),(0,59)]
class ConfigClock(ConfigSequence):
def __init__(self, default):
t = localtime(default)
ConfigSequence.__init__(self, seperator = ":", limits = clock_limits, default = [t.tm_hour, t.tm_min])
def increment(self):
# Check if Minutes maxed out
if self._value[1] == 59:
# Increment Hour, reset Minutes
if self._value[0] < 23:
self._value[0] += 1
else:
self._value[0] = 0
self._value[1] = 0
else:
# Increment Minutes
self._value[1] += 1
# Trigger change
self.changed()
def decrement(self):
# Check if Minutes is minimum
if self._value[1] == 0:
# Decrement Hour, set Minutes to 59
if self._value[0] > 0:
self._value[0] -= 1
else:
self._value[0] = 23
self._value[1] = 59
else:
# Decrement Minutes
self._value[1] -= 1
# Trigger change
self.changed()
integer_limits = (0, 9999999999)
class ConfigInteger(ConfigSequence):
def __init__(self, default, limits = integer_limits):
ConfigSequence.__init__(self, seperator = ":", limits = [limits], default = default)
# you need to override this to do input validation
def setValue(self, value):
self._value = [value]
self.changed()
def getValue(self):
return self._value[0]
value = property(getValue, setValue)
def fromstring(self, value):
return int(value)
def tostring(self, value):
return str(value)
class ConfigPIN(ConfigInteger):
def __init__(self, default, len = 4, censor = ""):
assert isinstance(default, int), "ConfigPIN default must be an integer"
ConfigSequence.__init__(self, seperator = ":", limits = [(0, (10**len)-1)], censor_char = censor, default = default)
self.len = len
def getLength(self):
return self.len
class ConfigFloat(ConfigSequence):
def __init__(self, default, limits):
ConfigSequence.__init__(self, seperator = ".", limits = limits, default = default)
def getFloat(self):
return float(self.value[1] / float(self.limits[1][1] + 1) + self.value[0])
float = property(getFloat)
# an editable text...
class ConfigText(ConfigElement, NumericalTextInput):
def __init__(self, default = "", fixed_size = True, visible_width = False):
ConfigElement.__init__(self)
NumericalTextInput.__init__(self, nextFunc = self.nextFunc, handleTimeout = False)
self.marked_pos = 0
self.allmarked = (default != "")
self.fixed_size = fixed_size
self.visible_width = visible_width
self.offset = 0
self.overwrite = fixed_size
self.help_window = None
self.value = self.last_value = self.default = default
def validateMarker(self):
textlen = len(self.text)
if self.fixed_size:
if self.marked_pos > textlen-1:
self.marked_pos = textlen-1
else:
if self.marked_pos > textlen:
self.marked_pos = textlen
if self.marked_pos < 0:
self.marked_pos = 0
if self.visible_width:
if self.marked_pos < self.offset:
self.offset = self.marked_pos
if self.marked_pos >= self.offset + self.visible_width:
if self.marked_pos == textlen:
self.offset = self.marked_pos - self.visible_width
else:
self.offset = self.marked_pos - self.visible_width + 1
if self.offset > 0 and self.offset + self.visible_width > textlen:
self.offset = max(0, textlen - self.visible_width)
def insertChar(self, ch, pos, owr):
if owr or self.overwrite:
self.text = self.text[0:pos] + ch + self.text[pos + 1:]
elif self.fixed_size:
self.text = self.text[0:pos] + ch + self.text[pos:-1]
else:
self.text = self.text[0:pos] + ch + self.text[pos:]
def deleteChar(self, pos):
if not self.fixed_size:
self.text = self.text[0:pos] + self.text[pos + 1:]
elif self.overwrite:
self.text = self.text[0:pos] + " " + self.text[pos + 1:]
else:
self.text = self.text[0:pos] + self.text[pos + 1:] + " "
def deleteAllChars(self):
if self.fixed_size:
self.text = " " * len(self.text)
else:
self.text = ""
self.marked_pos = 0
def handleKey(self, key):
# this will not change anything on the value itself,
# so we can handle it here in the gui element
if key == KEY_DELETE:
self.timeout()
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
else:
self.deleteChar(self.marked_pos)
if self.fixed_size and self.overwrite:
self.marked_pos += 1
elif key == KEY_BACKSPACE:
self.timeout()
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
elif self.marked_pos > 0:
self.deleteChar(self.marked_pos-1)
if not self.fixed_size and self.offset > 0:
self.offset -= 1
self.marked_pos -= 1
elif key == KEY_LEFT:
self.timeout()
if self.allmarked:
self.marked_pos = len(self.text)
self.allmarked = False
else:
self.marked_pos -= 1
elif key == KEY_RIGHT:
self.timeout()
if self.allmarked:
self.marked_pos = 0
self.allmarked = False
else:
self.marked_pos += 1
elif key == KEY_HOME:
self.timeout()
self.allmarked = False
self.marked_pos = 0
elif key == KEY_END:
self.timeout()
self.allmarked = False
self.marked_pos = len(self.text)
elif key == KEY_TOGGLEOW:
self.timeout()
self.overwrite = not self.overwrite
elif key == KEY_ASCII:
self.timeout()
newChar = unichr(getPrevAsciiCode())
if not self.useableChars or newChar in self.useableChars:
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
self.insertChar(newChar, self.marked_pos, False)
self.marked_pos += 1
elif key in KEY_NUMBERS:
owr = self.lastKey == getKeyNumber(key)
newChar = self.getKey(getKeyNumber(key))
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
self.insertChar(newChar, self.marked_pos, owr)
elif key == KEY_TIMEOUT:
self.timeout()
if self.help_window:
self.help_window.update(self)
return
if self.help_window:
self.help_window.update(self)
self.validateMarker()
self.changed()
def nextFunc(self):
self.marked_pos += 1
self.validateMarker()
self.changed()
def getValue(self):
try:
return self.text.encode("utf-8")
except UnicodeDecodeError:
print "Broken UTF8!"
return self.text
def setValue(self, val):
try:
self.text = val.decode("utf-8")
except UnicodeDecodeError:
self.text = val.decode("utf-8", "ignore")
print "Broken UTF8!"
value = property(getValue, setValue)
_value = property(getValue, setValue)
def getText(self):
return self.text.encode("utf-8")
def getMulti(self, selected):
if self.visible_width:
if self.allmarked:
mark = range(0, min(self.visible_width, len(self.text)))
else:
mark = [self.marked_pos-self.offset]
return "mtext"[1-selected:], self.text[self.offset:self.offset+self.visible_width].encode("utf-8")+" ", mark
else:
if self.allmarked:
mark = range(0, len(self.text))
else:
mark = [self.marked_pos]
return "mtext"[1-selected:], self.text.encode("utf-8")+" ", mark
def onSelect(self, session):
self.allmarked = (self.value != "")
if session is not None:
from Screens.NumericalTextInputHelpDialog import NumericalTextInputHelpDialog
self.help_window = session.instantiateDialog(NumericalTextInputHelpDialog, self)
self.help_window.setAnimationMode(0)
self.help_window.show()
def onDeselect(self, session):
self.marked_pos = 0
self.offset = 0
if self.help_window:
session.deleteDialog(self.help_window)
self.help_window = None
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
def getHTML(self, id):
return '<input type="text" name="' + id + '" value="' + self.value + '" /><br>\n'
def unsafeAssign(self, value):
self.value = str(value)
class ConfigPassword(ConfigText):
def __init__(self, default = "", fixed_size = False, visible_width = False, censor = "*"):
ConfigText.__init__(self, default = default, fixed_size = fixed_size, visible_width = visible_width)
self.censor_char = censor
self.hidden = True
def getMulti(self, selected):
mtext, text, mark = ConfigText.getMulti(self, selected)
if self.hidden:
text = len(text) * self.censor_char
return mtext, text, mark
def onSelect(self, session):
ConfigText.onSelect(self, session)
self.hidden = False
def onDeselect(self, session):
ConfigText.onDeselect(self, session)
self.hidden = True
# lets the user select between [min, min+stepwidth, min+(stepwidth*2)..., maxval] with maxval <= max depending
# on the stepwidth
# min, max, stepwidth, default are int values
# wraparound: pressing RIGHT key at max value brings you to min value and vice versa if set to True
class ConfigSelectionNumber(ConfigSelection):
def __init__(self, min, max, stepwidth, default = None, wraparound = False):
self.wraparound = wraparound
if default is None:
default = min
default = str(default)
choices = []
step = min
while step <= max:
choices.append(str(step))
step += stepwidth
ConfigSelection.__init__(self, choices, default)
def getValue(self):
return int(ConfigSelection.getValue(self))
def setValue(self, val):
ConfigSelection.setValue(self, str(val))
value = property(getValue, setValue)
def getIndex(self):
return self.choices.index(self.value)
index = property(getIndex)
def isChanged(self):
sv = self.saved_value
strv = str(self.tostring(self.value))
if sv is None and strv == str(self.default):
return False
return strv != str(sv)
def handleKey(self, key):
if not self.wraparound:
if key == KEY_RIGHT:
if len(self.choices) == (self.choices.index(str(self.value)) + 1):
return
if key == KEY_LEFT:
if self.choices.index(str(self.value)) == 0:
return
nchoices = len(self.choices)
if nchoices > 1:
i = self.choices.index(str(self.value))
if key == KEY_LEFT:
self.value = self.choices[(i + nchoices - 1) % nchoices]
elif key == KEY_RIGHT:
self.value = self.choices[(i + 1) % nchoices]
elif key == KEY_HOME:
self.value = self.choices[0]
elif key == KEY_END:
self.value = self.choices[nchoices - 1]
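# A minimal, hypothetical usage sketch of ConfigSelectionNumber (the element
# name is illustrative only); the choices are built from min/max/stepwidth as
# described in the comment above:
#
#config.example_step = ConfigSelectionNumber(min = 1, max = 10, stepwidth = 1, default = 5, wraparound = True)
#config.example_step.handleKey(KEY_RIGHT)  # 5 -> 6
#config.example_step.value                 # == 6 (getValue() returns an int)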
class ConfigNumber(ConfigText):
def __init__(self, default = 0):
ConfigText.__init__(self, str(default), fixed_size = False)
def getValue(self):
try:
return int(self.text)
except ValueError:
if self.text == "true":
self.text = "1"
else:
				self.text = str(self.default)
return int(self.text)
def setValue(self, val):
self.text = str(val)
value = property(getValue, setValue)
_value = property(getValue, setValue)
def isChanged(self):
sv = self.saved_value
strv = self.tostring(self.value)
if sv is None and strv == self.default:
return False
return strv != sv
def conform(self):
pos = len(self.text) - self.marked_pos
self.text = self.text.lstrip("0")
if self.text == "":
self.text = "0"
if pos > len(self.text):
self.marked_pos = 0
else:
self.marked_pos = len(self.text) - pos
def handleKey(self, key):
if key in KEY_NUMBERS or key == KEY_ASCII:
if key == KEY_ASCII:
ascii = getPrevAsciiCode()
if not (48 <= ascii <= 57):
return
else:
ascii = getKeyNumber(key) + 48
newChar = unichr(ascii)
if self.allmarked:
self.deleteAllChars()
self.allmarked = False
self.insertChar(newChar, self.marked_pos, False)
self.marked_pos += 1
else:
ConfigText.handleKey(self, key)
self.conform()
def onSelect(self, session):
self.allmarked = (self.value != "")
def onDeselect(self, session):
self.marked_pos = 0
self.offset = 0
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value
class ConfigSearchText(ConfigText):
def __init__(self, default = "", fixed_size = False, visible_width = False):
ConfigText.__init__(self, default = default, fixed_size = fixed_size, visible_width = visible_width)
NumericalTextInput.__init__(self, nextFunc = self.nextFunc, handleTimeout = False, search = True)
class ConfigDirectory(ConfigText):
def __init__(self, default="", visible_width=60):
ConfigText.__init__(self, default, fixed_size = True, visible_width = visible_width)
def handleKey(self, key):
pass
def getValue(self):
if self.text == "":
return None
else:
return ConfigText.getValue(self)
def setValue(self, val):
if val is None:
val = ""
ConfigText.setValue(self, val)
def getMulti(self, selected):
if self.text == "":
return "mtext"[1-selected:], _("List of storage devices"), range(0)
else:
return ConfigText.getMulti(self, selected)
def onSelect(self, session):
self.allmarked = (self.value != "")
# a slider.
class ConfigSlider(ConfigElement):
def __init__(self, default = 0, increment = 1, limits = (0, 100)):
ConfigElement.__init__(self)
self.value = self.last_value = self.default = default
self.min = limits[0]
self.max = limits[1]
self.increment = increment
def checkValues(self, value = None):
if value is None:
value = self.value
if value < self.min:
value = self.min
elif value > self.max:
value = self.max
if self.value != value: #avoid call of setter if value not changed
self.value = value
def handleKey(self, key):
if key == KEY_LEFT:
tmp = self.value - self.increment
elif key == KEY_RIGHT:
tmp = self.value + self.increment
elif key == KEY_HOME:
self.value = self.min
return
elif key == KEY_END:
self.value = self.max
return
else:
return
self.checkValues(tmp)
def getText(self):
return "%d / %d" % (self.value, self.max)
def getMulti(self, selected):
self.checkValues()
return "slider", self.value, self.max
def fromstring(self, value):
return int(value)
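# A minimal, hypothetical usage sketch of ConfigSlider; checkValues() clamps
# the value to the configured limits:
#
#config.example_level = ConfigSlider(default = 50, increment = 5, limits = (0, 100))
#config.example_level.handleKey(KEY_RIGHT)  # 50 -> 55
#config.example_level.handleKey(KEY_END)    # jumps to the maximum, 100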
# a satlist. in fact, it's a ConfigSelection.
class ConfigSatlist(ConfigSelection):
def __init__(self, list, default = None):
if default is not None:
default = str(default)
ConfigSelection.__init__(self, choices = [(str(orbpos), desc) for (orbpos, desc, flags) in list], default = default)
def getOrbitalPosition(self):
if self.value == "":
return None
return int(self.value)
orbital_position = property(getOrbitalPosition)
class ConfigSet(ConfigElement):
def __init__(self, choices, default=None):
if not default: default = []
ConfigElement.__init__(self)
if isinstance(choices, list):
choices.sort()
self.choices = choicesList(choices, choicesList.LIST_TYPE_LIST)
else:
assert False, "ConfigSet choices must be a list!"
self.pos = -1
default.sort()
self.last_value = self.default = default
self.value = default[:]
def toggleChoice(self, choice):
value = self.value
if choice in value:
value.remove(choice)
else:
value.append(choice)
value.sort()
self.changed()
def handleKey(self, key):
if key in KEY_NUMBERS + [KEY_DELETE, KEY_BACKSPACE]:
if self.pos != -1:
self.toggleChoice(self.choices[self.pos])
elif key == KEY_LEFT:
if self.pos < 0:
self.pos = len(self.choices)-1
else:
self.pos -= 1
elif key == KEY_RIGHT:
if self.pos >= len(self.choices)-1:
self.pos = -1
else:
self.pos += 1
elif key in (KEY_HOME, KEY_END):
self.pos = -1
def genString(self, lst):
res = ""
for x in lst:
res += self.description[x]+" "
return res
def getText(self):
return self.genString(self.value)
def getMulti(self, selected):
if not selected or self.pos == -1:
return "text", self.genString(self.value)
else:
tmp = self.value[:]
ch = self.choices[self.pos]
mem = ch in self.value
if not mem:
tmp.append(ch)
tmp.sort()
ind = tmp.index(ch)
val1 = self.genString(tmp[:ind])
val2 = " "+self.genString(tmp[ind+1:])
if mem:
chstr = " "+self.description[ch]+" "
else:
chstr = "("+self.description[ch]+")"
len_val1 = len(val1)
return "mtext", val1+chstr+val2, range(len_val1, len_val1 + len(chstr))
def onDeselect(self, session):
self.pos = -1
if not self.last_value == self.value:
self.changedFinal()
self.last_value = self.value[:]
def tostring(self, value):
return str(value)
def fromstring(self, val):
return eval(val)
description = property(lambda self: descriptionList(self.choices.choices, choicesList.LIST_TYPE_LIST))
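# A minimal, hypothetical usage sketch of ConfigSet; the value is kept as a
# sorted list of the toggled choices:
#
#config.example_days = ConfigSet(choices = [1, 2, 3], default = [1])
#config.example_days.toggleChoice(2)  # value becomes [1, 2]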
class ConfigDictionarySet(ConfigElement):
	def __init__(self, default = None):
		if default is None:
			default = {}
		ConfigElement.__init__(self)
self.default = default
self.dirs = {}
self.value = self.default
def getKeys(self):
return self.dir_pathes
def setValue(self, value):
if isinstance(value, dict):
self.dirs = value
self.changed()
def getValue(self):
return self.dirs
value = property(getValue, setValue)
def tostring(self, value):
return str(value)
def fromstring(self, val):
return eval(val)
def load(self):
sv = self.saved_value
if sv is None:
tmp = self.default
else:
tmp = self.fromstring(sv)
self.dirs = tmp
def changeConfigValue(self, value, config_key, config_value):
if isinstance(value, str) and isinstance(config_key, str):
if value in self.dirs:
self.dirs[value][config_key] = config_value
else:
self.dirs[value] = {config_key : config_value}
self.changed()
def getConfigValue(self, value, config_key):
if isinstance(value, str) and isinstance(config_key, str):
if value in self.dirs and config_key in self.dirs[value]:
return self.dirs[value][config_key]
return None
def removeConfigValue(self, value, config_key):
if isinstance(value, str) and isinstance(config_key, str):
if value in self.dirs and config_key in self.dirs[value]:
try:
del self.dirs[value][config_key]
except KeyError:
pass
self.changed()
def save(self):
del_keys = []
for key in self.dirs:
if not len(self.dirs[key]):
del_keys.append(key)
for del_key in del_keys:
try:
del self.dirs[del_key]
except KeyError:
pass
self.changed()
self.saved_value = self.tostring(self.dirs)
class ConfigLocations(ConfigElement):
def __init__(self, default=None, visible_width=False):
if not default: default = []
ConfigElement.__init__(self)
self.visible_width = visible_width
self.pos = -1
self.default = default
self.locations = []
self.mountpoints = []
self.value = default[:]
def setValue(self, value):
locations = self.locations
loc = [x[0] for x in locations if x[3]]
add = [x for x in value if not x in loc]
diff = add + [x for x in loc if not x in value]
locations = [x for x in locations if not x[0] in diff] + [[x, self.getMountpoint(x), True, True] for x in add]
#locations.sort(key = lambda x: x[0])
self.locations = locations
self.changed()
def getValue(self):
self.checkChangedMountpoints()
locations = self.locations
for x in locations:
x[3] = x[2]
return [x[0] for x in locations if x[3]]
value = property(getValue, setValue)
def tostring(self, value):
return str(value)
def fromstring(self, val):
return eval(val)
def load(self):
sv = self.saved_value
if sv is None:
tmp = self.default
else:
tmp = self.fromstring(sv)
locations = [[x, None, False, False] for x in tmp]
self.refreshMountpoints()
for x in locations:
if fileExists(x[0]):
x[1] = self.getMountpoint(x[0])
x[2] = True
self.locations = locations
def save(self):
locations = self.locations
if self.save_disabled or not locations:
self.saved_value = None
else:
self.saved_value = self.tostring([x[0] for x in locations])
def isChanged(self):
sv = self.saved_value
locations = self.locations
		if sv is None and not locations:
return False
return self.tostring([x[0] for x in locations]) != sv
def addedMount(self, mp):
for x in self.locations:
if x[1] == mp:
x[2] = True
elif x[1] is None and fileExists(x[0]):
x[1] = self.getMountpoint(x[0])
x[2] = True
def removedMount(self, mp):
for x in self.locations:
if x[1] == mp:
x[2] = False
def refreshMountpoints(self):
self.mountpoints = [p.mountpoint for p in harddiskmanager.getMountedPartitions() if p.mountpoint != "/"]
self.mountpoints.sort(key = lambda x: -len(x))
def checkChangedMountpoints(self):
oldmounts = self.mountpoints
self.refreshMountpoints()
newmounts = self.mountpoints
if oldmounts == newmounts:
return
for x in oldmounts:
if not x in newmounts:
self.removedMount(x)
for x in newmounts:
if not x in oldmounts:
self.addedMount(x)
def getMountpoint(self, file):
file = os_path.realpath(file)+"/"
for m in self.mountpoints:
if file.startswith(m):
return m
return None
def handleKey(self, key):
if key == KEY_LEFT:
self.pos -= 1
if self.pos < -1:
self.pos = len(self.value)-1
elif key == KEY_RIGHT:
self.pos += 1
if self.pos >= len(self.value):
self.pos = -1
elif key in (KEY_HOME, KEY_END):
self.pos = -1
def getText(self):
return " ".join(self.value)
def getMulti(self, selected):
if not selected:
valstr = " ".join(self.value)
if self.visible_width and len(valstr) > self.visible_width:
return "text", valstr[0:self.visible_width]
else:
return "text", valstr
else:
i = 0
valstr = ""
ind1 = 0
ind2 = 0
for val in self.value:
if i == self.pos:
ind1 = len(valstr)
valstr += str(val)+" "
if i == self.pos:
ind2 = len(valstr)
i += 1
if self.visible_width and len(valstr) > self.visible_width:
if ind1+1 < self.visible_width/2:
off = 0
else:
off = min(ind1+1-self.visible_width/2, len(valstr)-self.visible_width)
return "mtext", valstr[off:off+self.visible_width], range(ind1-off,ind2-off)
else:
return "mtext", valstr, range(ind1,ind2)
def onDeselect(self, session):
self.pos = -1
# nothing.
class ConfigNothing(ConfigSelection):
def __init__(self):
ConfigSelection.__init__(self, choices = [("","")])
# until here, 'saved_value' always had to be a *string*.
# now, in ConfigSubsection, and only there, saved_value
# is a dict, essentially forming a tree.
#
# config.foo.bar=True
# config.foobar=False
#
# turns into:
# config.saved_value == {"foo": {"bar": "True"}, "foobar": "False"}
#
class ConfigSubsectionContent(object):
pass
# we store a backup of the loaded configuration
# data in self.stored_values, to be able to deploy
# them when a new config element will be added,
# so non-default values are instantly available
# A list, for example:
# config.dipswitches = ConfigSubList()
# config.dipswitches.append(ConfigYesNo())
# config.dipswitches.append(ConfigYesNo())
# config.dipswitches.append(ConfigYesNo())
class ConfigSubList(list, object):
def __init__(self):
list.__init__(self)
self.stored_values = {}
def save(self):
for x in self:
x.save()
def load(self):
for x in self:
x.load()
def getSavedValue(self):
res = { }
for i, val in enumerate(self):
sv = val.saved_value
if sv is not None:
res[str(i)] = sv
return res
def setSavedValue(self, values):
self.stored_values = dict(values)
for (key, val) in self.stored_values.items():
if int(key) < len(self):
self[int(key)].saved_value = val
saved_value = property(getSavedValue, setSavedValue)
def append(self, item):
i = str(len(self))
list.append(self, item)
if i in self.stored_values:
item.saved_value = self.stored_values[i]
item.load()
def dict(self):
return dict([(str(index), value) for index, value in enumerate(self)])
# same as ConfigSubList, just as a dictionary.
# care must be taken that the 'key' has a proper
# str() method, because it will be used in the config
# file.
class ConfigSubDict(dict, object):
def __init__(self):
dict.__init__(self)
self.stored_values = {}
def save(self):
for x in self.values():
x.save()
def load(self):
for x in self.values():
x.load()
def getSavedValue(self):
res = {}
for (key, val) in self.items():
sv = val.saved_value
if sv is not None:
res[str(key)] = sv
return res
def setSavedValue(self, values):
self.stored_values = dict(values)
for (key, val) in self.items():
if str(key) in self.stored_values:
val.saved_value = self.stored_values[str(key)]
saved_value = property(getSavedValue, setSavedValue)
def __setitem__(self, key, item):
dict.__setitem__(self, key, item)
if str(key) in self.stored_values:
item.saved_value = self.stored_values[str(key)]
item.load()
def dict(self):
return self
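# A minimal, hypothetical usage sketch of ConfigSubDict; the integer key is
# converted with str() when written to the config file:
#
#config.example_nims = ConfigSubDict()
#config.example_nims[0] = ConfigYesNo()
#config.example_nims[0].value = True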
# Like the classes above, just with a more "native"
# syntax.
#
# some evil stuff must be done to allow instant
# loading of added elements. this is why this class
# is so complex.
#
# we need the 'content' because we overwrite
# __setattr__.
# If you don't understand this, try adding
# __setattr__ to a usual existing class and you will.
class ConfigSubsection(object):
def __init__(self):
self.__dict__["content"] = ConfigSubsectionContent()
self.content.items = { }
self.content.stored_values = { }
def __setattr__(self, name, value):
if name == "saved_value":
return self.setSavedValue(value)
assert isinstance(value, (ConfigSubsection, ConfigElement, ConfigSubList, ConfigSubDict)), "ConfigSubsections can only store ConfigSubsections, ConfigSubLists, ConfigSubDicts or ConfigElements"
content = self.content
content.items[name] = value
x = content.stored_values.get(name, None)
if x is not None:
#print "ok, now we have a new item,", name, "and have the following value for it:", x
value.saved_value = x
value.load()
def __getattr__(self, name):
return self.content.items[name]
def getSavedValue(self):
res = self.content.stored_values
for (key, val) in self.content.items.items():
sv = val.saved_value
if sv is not None:
res[key] = sv
elif key in res:
del res[key]
return res
def setSavedValue(self, values):
values = dict(values)
self.content.stored_values = values
for (key, val) in self.content.items.items():
value = values.get(key, None)
if value is not None:
val.saved_value = value
saved_value = property(getSavedValue, setSavedValue)
def save(self):
for x in self.content.items.values():
x.save()
def load(self):
for x in self.content.items.values():
x.load()
def dict(self):
return self.content.items
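# A minimal, hypothetical sketch of the saved_value tree described above:
#
#config.foo = ConfigSubsection()
#config.foo.bar = ConfigYesNo(default = False)
#config.foo.bar.value = True
#config.foo.bar.save()
#config.saved_value  # == {"foo": {"bar": "true"}} (string representation assumed)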
# the root config object, which also can "pickle" (=serialize)
# down the whole config tree.
#
# we try to keep non-existing config entries, to apply them whenever
# a new config entry is added to a subsection
# also, non-existing config entries will be saved, so they won't be
# lost when a config entry disappears.
class Config(ConfigSubsection):
def __init__(self):
ConfigSubsection.__init__(self)
def pickle_this(self, prefix, topickle, result):
for (key, val) in sorted(topickle.items(), key=lambda x: int(x[0]) if x[0].isdigit() else x[0].lower()):
name = '.'.join((prefix, key))
if isinstance(val, dict):
self.pickle_this(name, val, result)
elif isinstance(val, tuple):
result += [name, '=', str(val[0]), '\n']
else:
result += [name, '=', str(val), '\n']
def pickle(self):
result = []
self.pickle_this("config", self.saved_value, result)
return ''.join(result)
def unpickle(self, lines, base_file=True):
tree = { }
configbase = tree.setdefault("config", {})
for l in lines:
if not l or l[0] == '#':
continue
result = l.split('=', 1)
if len(result) != 2:
continue
(name, val) = result
val = val.strip()
#convert old settings
if l.startswith("config.Nims."):
tmp = name.split('.')
if tmp[3] == "cable":
tmp[3] = "dvbc"
elif tmp[3].startswith ("cable"):
tmp[3] = "dvbc." + tmp[3]
elif tmp[3].startswith("terrestrial"):
tmp[3] = "dvbt." + tmp[3]
else:
if tmp[3] not in ('dvbs', 'dvbc', 'dvbt', 'multiType'):
tmp[3] = "dvbs." + tmp[3]
name =".".join(tmp)
names = name.split('.')
base = configbase
for n in names[1:-1]:
base = base.setdefault(n, {})
base[names[-1]] = val
if not base_file: # not the initial config file..
				# update config.x.y.value when it exists
try:
configEntry = eval(name)
if configEntry is not None:
configEntry.value = val
except (SyntaxError, KeyError):
pass
# we inherit from ConfigSubsection, so ...
#object.__setattr__(self, "saved_value", tree["config"])
if "config" in tree:
self.setSavedValue(tree["config"])
def saveToFile(self, filename):
text = self.pickle()
try:
import os
f = open(filename + ".writing", "w")
f.write(text)
f.flush()
os.fsync(f.fileno())
f.close()
os.rename(filename + ".writing", filename)
except IOError:
print "Config: Couldn't write %s" % filename
def loadFromFile(self, filename, base_file=True):
self.unpickle(open(filename, "r"), base_file)
config = Config()
config.misc = ConfigSubsection()
class ConfigFile:
def __init__(self):
pass
CONFIG_FILE = resolveFilename(SCOPE_CONFIG, "settings")
def load(self):
try:
config.loadFromFile(self.CONFIG_FILE, True)
except IOError, e:
print "unable to load config (%s), assuming defaults..." % str(e)
def save(self):
# config.save()
config.saveToFile(self.CONFIG_FILE)
def __resolveValue(self, pickles, cmap):
key = pickles[0]
if cmap.has_key(key):
if len(pickles) > 1:
return self.__resolveValue(pickles[1:], cmap[key].dict())
else:
return str(cmap[key].value)
return None
def getResolvedKey(self, key):
names = key.split('.')
if len(names) > 1:
if names[0] == "config":
ret=self.__resolveValue(names[1:], config.content.items)
if ret and len(ret):
return ret
print "getResolvedKey", key, "failed !! (Typo??)"
return ""
def NoSave(element):
element.disableSave()
return element
configfile = ConfigFile()
configfile.load()
def getConfigListEntry(*args):
assert len(args) > 1, "getConfigListEntry needs a minimum of two arguments (descr, configElement)"
return args
def updateConfigElement(element, newelement):
newelement.value = element.value
return newelement
#def _(x):
# return x
#
#config.bla = ConfigSubsection()
#config.bla.test = ConfigYesNo()
#config.nim = ConfigSubList()
#config.nim.append(ConfigSubsection())
#config.nim[0].bla = ConfigYesNo()
#config.nim.append(ConfigSubsection())
#config.nim[1].bla = ConfigYesNo()
#config.nim[1].blub = ConfigYesNo()
#config.arg = ConfigSubDict()
#config.arg["Hello"] = ConfigYesNo()
#
#config.arg["Hello"].handleKey(KEY_RIGHT)
#config.arg["Hello"].handleKey(KEY_RIGHT)
#
##config.saved_value
#
##configfile.save()
#config.save()
#print config.pickle()
cec_limits = [(0,15),(0,15),(0,15),(0,15)]
class ConfigCECAddress(ConfigSequence):
def __init__(self, default, auto_jump = False):
ConfigSequence.__init__(self, seperator = ".", limits = cec_limits, default = default)
self.block_len = [len(str(x[1])) for x in self.limits]
self.marked_block = 0
self.overwrite = True
self.auto_jump = auto_jump
def handleKey(self, key):
if key == KEY_LEFT:
if self.marked_block > 0:
self.marked_block -= 1
self.overwrite = True
elif key == KEY_RIGHT:
if self.marked_block < len(self.limits)-1:
self.marked_block += 1
self.overwrite = True
elif key == KEY_HOME:
self.marked_block = 0
self.overwrite = True
elif key == KEY_END:
self.marked_block = len(self.limits)-1
self.overwrite = True
elif key in KEY_NUMBERS or key == KEY_ASCII:
if key == KEY_ASCII:
code = getPrevAsciiCode()
if code < 48 or code > 57:
return
number = code - 48
else:
number = getKeyNumber(key)
oldvalue = self._value[self.marked_block]
if self.overwrite:
self._value[self.marked_block] = number
self.overwrite = False
else:
oldvalue *= 10
newvalue = oldvalue + number
if self.auto_jump and newvalue > self.limits[self.marked_block][1] and self.marked_block < len(self.limits)-1:
self.handleKey(KEY_RIGHT)
self.handleKey(key)
return
else:
self._value[self.marked_block] = newvalue
if len(str(self._value[self.marked_block])) >= self.block_len[self.marked_block]:
self.handleKey(KEY_RIGHT)
self.validate()
self.changed()
def genText(self):
value = ""
block_strlen = []
for i in self._value:
block_strlen.append(len(str(i)))
if value:
value += self.seperator
value += str(i)
leftPos = sum(block_strlen[:self.marked_block])+self.marked_block
rightPos = sum(block_strlen[:(self.marked_block+1)])+self.marked_block
mBlock = range(leftPos, rightPos)
return value, mBlock
def getMulti(self, selected):
(value, mBlock) = self.genText()
if self.enabled:
return "mtext"[1-selected:], value, mBlock
else:
return "text", value
def getHTML(self, id):
# we definitely don't want leading zeros
return '.'.join(["%d" % d for d in self.value])
class ConfigAction(ConfigElement):
def __init__(self, action, *args):
ConfigElement.__init__(self)
self.value = "(OK)"
self.action = action
self.actionargs = args
def handleKey(self, key):
if (key == KEY_OK):
self.action(*self.actionargs)
def getMulti(self, dummy):
pass
| gpl-2.0 | -416,073,081,692,490,400 | 25.937354 | 195 | 0.664243 | false |
Magda-M/general-tools | fq.split.py | 1 | 2084 | """
SOURCE: https://gist.github.com/brentp/6625544
split a single fastq file in to random, non-overlapping subsets
arguments:
+ fastq file
+ number of splits
+ number of reps
e.g.:
python fq.split.py input.fastq 3 4
will create 12 new files in 4 sets of 3. Each
set of 3 will contain all of the original records.
"""
import gzip
import random
from itertools import islice, izip
xopen = lambda fq: gzip.open(fq) if fq.endswith('.gz') else open(fq)
def fqiter(fq, lines_per_read):
with xopen(fq) as fh:
fqclean = (x.strip("\r\n") for x in fh if x.strip())
while True:
rec = [x for x in islice(fqclean, lines_per_read)]
if not rec: raise StopIteration
assert all(rec) and len(rec) == lines_per_read
yield rec
def fqsplit(fq, nchunks, nreps, paired, prefix=None):
if paired:
lines_per_read = 8
else:
lines_per_read = 4
if prefix == None: prefix = fq + ".split"
prefix += "chunk-%i.rep-%i.fq"
fq_size = sum(1 for x in xopen(fq) if len(x.strip("\r\n"))>0)
assert fq_size % lines_per_read == 0
fq_size /= lines_per_read # number of records
print >>sys.stderr, "num reads/read pairs:", fq_size
print >>sys.stderr, "num chunks to split into:", nchunks
if fq_size % nchunks == 0 :
chunk_size = fq_size // nchunks
else:
chunk_size = 1 + (fq_size) // nchunks
print >>sys.stderr, "chunk_size:", chunk_size
for rep in range(1, nreps + 1):
files = [open(prefix % (c, rep), 'w') for c in range(1, nchunks + 1)]
ints = range(fq_size)
random.shuffle(ints)
for i, fqr in izip(ints, fqiter(fq, lines_per_read)):
chunk, chunk_i = divmod(i, chunk_size)
print >>files[chunk], "\n".join(fqr)
[f.close() for f in files]
if __name__ == "__main__":
import sys
fq = sys.argv[1]
nchunks = int(sys.argv[2])
nreps = int(sys.argv[3])
paired = bool(int(sys.argv[4]))
    print paired  # 0 = single, 1 = paired end reads
fqsplit(fq, nchunks, nreps, paired) | gpl-3.0 | 607,869,430,377,086,200 | 27.561644 | 77 | 0.597409 | false |
karlssonper/gpuip | python/gpuip.py | 1 | 6372 | #!/usr/bin/env python
import utils
import sys
import signal
import os
try:
import argparse
parsermodule = argparse.ArgumentParser
except:
import optparse
parsermodule = optparse.OptionParser
parsermodule.add_argument = parsermodule.add_option
def getCommandLineArguments():
# Command line arguments
desc = "Framework for Image Processing on the GPU"
parser = parsermodule("gpuip", description=desc)
parser.add_argument("-f", "--file",
help="Image Processing file *.ip")
parser.add_argument("-p", "--param",
action="append",
nargs = 3,
metavar = ("kernel", "param", "value"),
help="Change value of a parameter.")
parser.add_argument("-i", "--inbuffer",
action="append",
nargs = 2,
metavar = ("buffer", "path"),
help = "Set input image to a buffer")
parser.add_argument("-o", "--outbuffer",
action="append",
nargs = 2,
metavar = ("buffer", "path"),
help = "Set output image to a buffer")
parser.add_argument("-v","--verbose",
action="store_true",
help="Outputs information")
parser.add_argument("--timestamp",
action="store_true",
help="Add timestamp in log output")
parser.add_argument("--nogui",
action="store_true",
help="Command line version")
if parsermodule.__name__ == "ArgumentParser":
return parser.parse_args()
else:
return parser.parse_args()[0]
def terminate(msg):
print msg
sys.exit(1)
def getSettings(args):
import settings
if not args.file or not os.path.isfile(args.file):
return None
ipsettings = settings.Settings()
ipsettings.read(args.file)
# Change parameter values
if args.param:
for p in args.param:
kernelName, paramName, value = p
kernel = ipsettings.getKernel(kernelName)
if not kernel:
terminate("gpuip error: No kernel %s found." % kernelName)
param = kernel.getParam(paramName)
if param:
param.setValue(utils.safeEval(value))
else:
terminate("gpuip error: No param %s found in kernel %s." \
% (paramName, kernelName))
# Change input buffers
if args.inbuffer:
for inb in args.inbuffer:
bufferName, path = inb[0], inb[1]
buffer = ipsettings.getBuffer(bufferName)
if buffer:
buffer.input = path
if not os.path.isfile(buffer.input):
raise IOError("No such file: '%s'" % buffer.input)
else:
terminate("gpuip error: No buffer %s found." % buffer)
# Change output buffers
if args.outbuffer:
for outb in args.outbuffer:
bufferName, path = outb[0], outb[1]
buffer = ipsettings.getBuffer(bufferName)
if buffer:
buffer.output = path
                dirname = os.path.dirname(os.path.realpath(path))
                if dirname and not os.path.isdir(dirname):
                    os.makedirs(dirname)
else:
terminate("gpuip error: No buffer %s found." % bufferName)
return ipsettings
def runGUI(ippath, ipsettings):
# Run GUI version
from PySide import QtGui
import mainwindow
# Makes it possible to close program with ctrl+c in a terminal
signal.signal(signal.SIGINT, signal.SIG_DFL)
app = QtGui.QApplication(sys.argv)
app.setStyle("plastique")
mainwindow = mainwindow.MainWindow(path = ippath, settings = ipsettings)
mainwindow.show()
sys.exit(app.exec_())
def runCommandLine(ipsettings, verbose):
# Can't run non-gui version if there's no *.ip file
if not ipsettings:
err = "Must specify an existing *.ip file in the command-line version\n"
err += "example: \n"
err += " gpuip --nogui smooth.ip"""
terminate(err)
def check_error(err):
if err:
terminate(err)
def log(text, stopwatch = None, time = True):
time = time and args.timestamp
if verbose:
stopwatchStr = str(stopwatch) if stopwatch else ""
timeStr = utils.getTimeStr() if time else ""
print timeStr + text + " " + stopwatchStr
overall_clock = utils.StopWatch()
### 0. Create gpuip items from settings
ip, buffers, kernels = ipsettings.create()
log("Created elements from settings.", overall_clock)
### 1. Build
c = utils.StopWatch()
check_error(ip.Build())
log("Building kernels [%s]." % [k.name for k in kernels], c)
### 2. Import data from images
c = utils.StopWatch()
for b in ipsettings.buffers:
if b.input:
log("Importing data from %s to %s" %(b.input, b.name))
check_error(buffers[b.name].Read(b.input, utils.getNumCores()))
log("Importing data done.", c)
### 3. Allocate and transfer data to GPU
c = utils.StopWatch()
width, height = utils.allocateBufferData(buffers)
ip.SetDimensions(width, height)
check_error(ip.Allocate())
log("Allocating done.", c)
c = utils.StopWatch()
for b in ipsettings.buffers:
if b.input:
check_error(ip.WriteBufferToGPU(buffers[b.name]))
log("Transfering data to GPU done.", c)
### 4. Process
c = utils.StopWatch()
check_error(ip.Run())
log("Processing done.", c)
### 5. Export buffers to images
c = utils.StopWatch()
for b in ipsettings.buffers:
if b.output:
log("Exporting data from %s to %s" %(b.name, b.output))
check_error(ip.ReadBufferFromGPU(buffers[b.name]))
check_error(buffers[b.name].Write(b.output,utils.getNumCores()))
log("Exporting data done.", c)
log("\nAll steps done. Total runtime:", overall_clock, time = False)
if __name__ == "__main__":
args = getCommandLineArguments()
ipsettings = getSettings(args)
if args.nogui:
runCommandLine(ipsettings, args.verbose)
else:
runGUI(args.file if ipsettings else None, ipsettings)
| mit | 2,553,237,519,268,901,400 | 32.893617 | 80 | 0.569052 | false |
danielballan/docs | source/_cookbook/csv_writer.py | 1 | 3011 | # -*- coding: utf-8 -*-
"""
========================================
A Minimal CSV writer for data collection
========================================
Problem
-------
Write (a subset of) the data to a CSV file during data collection.
Approach
--------
Write a callback function that integrates Python's built-in csv module with
bluesky.
Example Solution
----------------
"""
###############################################################################
# Boiler plate imports and configuration
import path
import os
import bluesky as bs
import bluesky.plans as bp
import bluesky.callbacks as bc
import csv
from bluesky.examples import motor, det
import matplotlib.pyplot as plt
# Do this if running the example interactively;
# skip it when building the documentation.
import os
if 'BUILDING_DOCS' not in os.environ:
from bluesky.utils import install_qt_kicker # for notebooks, qt -> nb
install_qt_kicker()
plt.ion()
det.exposure_time = .1 # simulate detector exposure time
RE = bs.RunEngine({})
###############################################################################
# Define a callback class which writes out a CSV file
class CSVWriter(bc.CallbackBase):
def __init__(self, fields, fname_format, fpath):
self._path = path.Path(fpath)
os.makedirs(self._path, exist_ok=True)
        self._fname_format = fname_format
self._fields = fields
self._writer = None
self._fout = None
def close(self):
if self._fout is not None:
self._fout.close()
self._fout = None
self._writer = None
def start(self, doc):
self.close()
        fname = self._path / self._fname_format.format(**doc)
self._fout = open(fname, 'xt')
self._writer = csv.writer(self._fout)
def descriptor(self, doc):
if self._writer is not None:
self._writer.writerow(self._fields)
def event(self, doc):
data = doc['data']
if self._writer is not None:
self._writer.writerow(data[k] for k in self._fields)
def stop(self, doc):
self.close()
###############################################################################
# Set up some callbacks
def create_cbs():
return [bc.LiveTable([motor, det]), bc.LivePlot('det', 'motor')]
fmt = '{user}_{uid:.6s}.csv'
export_path = '/tmp/export_demo'
csv_writer = CSVWriter(('motor', 'det'), fmt, export_path)
# send all documents to the CSV writer
RE.subscribe('all', csv_writer)
###############################################################################
# run the scan
uid, = RE(bp.scan([det], motor, -5, 5, 11),
create_cbs(), user='tcaswell')
###############################################################################
# check file
fname = os.path.join(export_path,
'{user}_{uid:.6s}.csv'.format(user='tcaswell', uid=uid))
print("--- {} ---".format(fname))
with open(fname, 'r') as fin:
for ln in fin:
print(ln.strip())
| bsd-2-clause | 6,292,943,657,254,038,000 | 24.091667 | 79 | 0.524078 | false |
kcompher/topik | topik/tokenizers.py | 1 | 12129 | from __future__ import absolute_import, print_function
import logging
import itertools
import re
from textblob import TextBlob
import gensim
from gensim.parsing.preprocessing import STOPWORDS
from topik.utils import collocations, entities
# imports used only for doctests
from topik.tests import test_data_path
from topik.readers import read_input
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
level=logging.INFO)
def tokenize_simple(text, stopwords=STOPWORDS):
"""A text tokenizer that simply lowercases, matches alphabetic
characters and removes stopwords.
Uses gensim.utils.tokenize and gensim.parsing.preprocessing.STOPWORDS.
Parameters
----------
text: input text to be tokenized
stopwords: words to ignore as noise
>>> id_documents = read_input(
... '{}/test_data_json_stream.json'.format(test_data_path),
... content_field="abstract")
>>> id, doc_text = next(iter(id_documents))
>>> doc_text == str(
... 'Transition metal oxides are being considered as the next generation '
... 'materials in field such as electronics and advanced catalysts; between'
... ' them is Tantalum (V) Oxide; however, there are few reports for the '
... 'synthesis of this material at the nanometer size which could have '
... 'unusual properties. Hence, in this work we present the synthesis of '
... 'Ta2O5 nanorods by sol gel method using DNA as structure directing '
... 'agent, the size of the nanorods was of the order of 40 to 100 nm in '
... 'diameter and several microns in length; this easy method can be useful'
... ' in the preparation of nanomaterials for electronics, biomedical '
... 'applications as well as catalysts.')
True
>>> tokens = tokenize_simple(doc_text)
>>> tokens == [
... u'transition', u'metal', u'oxides', u'considered', u'generation',
... u'materials', u'field', u'electronics', u'advanced', u'catalysts',
... u'tantalum', u'v', u'oxide', u'reports', u'synthesis', u'material',
... u'nanometer', u'size', u'unusual', u'properties', u'work', u'present',
... u'synthesis', u'ta', u'o', u'nanorods', u'sol', u'gel', u'method',
... u'dna', u'structure', u'directing', u'agent', u'size', u'nanorods',
... u'order', u'nm', u'diameter', u'microns', u'length', u'easy', u'method',
... u'useful', u'preparation', u'nanomaterials', u'electronics',
... u'biomedical', u'applications', u'catalysts']
True
"""
return [word for word in gensim.utils.tokenize(text, lower=True)
if word not in stopwords]
def collect_bigrams_and_trigrams(collection, top_n = 10000, min_bigram_freq=50,
min_trigram_freq=20, stopwords=STOPWORDS):
"""collects bigrams and trigrams from collection of documents. Input to collocation tokenizer.
bigrams are pairs of words that recur in the collection.
Parameters
----------
collection: iterable data to examine
top_n: limit results to this many entries
min_bigram_freq: (integer) threshold of when to consider a pair of words as a recognized bigram
min_trigram_freq: (integer) threshold of when to consider a triplet of words as a recognized trigram
stopwords: (iterable) collection of words to ignore in the corpus
>>> raw_data = read_input(
... '{}/test_data_json_stream.json'.format(test_data_path),
... content_field="abstract")
>>> bigrams, trigrams = collect_bigrams_and_trigrams(raw_data, min_bigram_freq=5, min_trigram_freq=3)
>>> bigrams.pattern == str(
... '(free standing|centered cubic|spatial resolution|vapor deposition|wear'
... ' resistance|plastic deformation|electrical conductivity|field magnets|'
... 'transmission electron|ray diffraction|electron microscopy|room '
... 'temperature|diffraction xrd|electron microscope|results indicate|'
... 'scanning electron|doped zno|microscopy tem|polymer matrix|size '
... 'distribution|mechanical properties|grain size|high spatial|particle '
... 'size|high resolution|high field|high strength)')
True
>>> trigrams.pattern == str(
... '(differential scanning calorimetry|face centered cubic|ray '
... 'microanalysis analytical|physical vapor deposition|transmission '
... 'electron microscopy|microanalysis analytical electron|chemical vapor '
... 'deposition|high aspect ratio|analytical electron microscope|ray '
... 'diffraction xrd|high spatial resolution|high field magnets|atomic '
... 'force microscopy|electron microscopy tem|narrow size distribution|'
... 'scanning electron microscopy|building high field|silicon oxide '
... 'nanowires)')
True
"""
# generator of documents, turn each element to its list of words
documents = (_split_words(text, stopwords) for text in collection.get_generator_without_id())
# generator, concatenate (chain) all words into a single sequence, lazily
words = itertools.chain.from_iterable(documents)
bigrams, trigrams = collocations(words, top_n=top_n, min_bigram_freq=min_bigram_freq,
min_trigram_freq=min_trigram_freq)
return bigrams, trigrams
def _split_words(text, stopwords):
"""Split text into a list of single words. Ignore any token in the `stopwords` set.
"""
return [word
for word in gensim.utils.tokenize(text, lower=True)
if word not in stopwords and len(word) > 2]
def tokenize_collocation(text, bigrams, trigrams, stopwords=STOPWORDS):
"""A text tokenizer that includes collocations(bigrams and trigrams).
A collocation is sequence of words or terms
that co-occur more often than would be expected by chance. Bigrams and trigrams must be found from the document
collection a-priori. Use the collect_bigrams_and_trigrams function to do so.
Uses gensim.parsing.preprocessing.STOPWORDS to remove stopwords and nltk.collocations.TrigramCollocationFinder to
find trigrams and bigrams.
Parameters
----------
reader: generator
A generator that yields each of the documents to tokenize. (e.g. topik.readers.iter_document_json_stream)
top_n: int
Number of collocations to retrieve from the stream of words (order by decreasing frequency). Default is 10000
min_bigram_freq: int
Minimum frequency of a bigram in order to retrieve it. Default is 50.
min_trigram_freq: int
Minimum frequency of a trigram in order to retrieve it. Default is 20.
>>> id_documents = read_input('{}/test_data_json_stream.json'.format(test_data_path), content_field="abstract")
>>> bigrams, trigrams = collect_bigrams_and_trigrams(id_documents, min_bigram_freq=2, min_trigram_freq=2)
>>> id, doc_text = next(iter(id_documents))
>>> tokenized_text = tokenize_collocation(doc_text, bigrams, trigrams)
>>> tokenized_text == [
... u'transition_metal', u'oxides', u'considered', u'generation',
... u'materials', u'field', u'electronics', u'advanced', u'catalysts',
... u'tantalum', u'oxide', u'reports', u'synthesis', u'material',
... u'nanometer_size', u'unusual', u'properties', u'work_present',
... u'synthesis', u'nanorods', u'sol', u'gel', u'method', u'dna',
... u'structure', u'directing', u'agent', u'size', u'nanorods', u'order',
... u'diameter', u'microns', u'length', u'easy', u'method', u'useful',
... u'preparation', u'nanomaterials', u'electronics', u'biomedical',
... u'applications', u'catalysts']
True
"""
text = ' '.join(_split_words(text, stopwords))
text = re.sub(trigrams, lambda match: match.group(0).replace(' ', '_'), text)
text = re.sub(bigrams, lambda match: match.group(0).replace(' ', '_'), text)
return text.split()
def find_entities(collection, freq_min=2, freq_max=10000):
return entities(collection, freq_max=freq_max, freq_min=freq_min)
def tokenize_entities(text, entities, stopwords=STOPWORDS):
"""A tokenizer that extracts noun phrases from text.
Requires that you first establish entities
Uses gensim.parsing.preprocessing.STOPWORDS. to remove stopwords and textblob.TextBlob().noun_phrases to find
`noun_phrases`.
Parameters
----------
reader: generator
A generator that yields each of the documents to tokenize. (e.g. topik.readers.iter_document_json_stream)
freq_min: int
Minimum frequency of a noun phrase occurrences in order to retrieve it. Default is 2.
freq_max: int
Maximum frequency of a noun phrase occurrences in order to retrieve it. Default is 10000.
>> id_documents = read_input('{}/test_data_json_stream.json'.format(test_data_path), "abstract")
>> entities = find_entities(id_documents)
>> print('entities: %r' % entities)
>> len(entities)
>> i = iter(id_documents)
>> id, doc_text = next(i)
>> doc_text
>> tokenized_text = tokenize_entities(doc_text, entities)
>> tokenized_text
>> id, doc_text = next(i)
>> doc_text
>> tokenized_text = tokenize_entities(doc_text, entities)
2015-02-04 17:18:55,618 : INFO : collecting entities from <generator object iter_document_json_stream at 0x10eaf0280>
2015-02-04 17:18:55,618 : INFO : at document #0, considering 0 phrases: []...
2015-02-04 17:18:57,363 : INFO : selected 563 entities: [u'simulation examples', u'comparison trials', u'vldb',
u'intelligent optimization algorithm', u'study outcomes', u'ge', u'standard program modules',
u'optimization activity', u'opposite context', u'direct victimization']...
>> tokenized_text
[[u'rapid_solution_phase_chemical_reduction_method', u'inert_gas_protection', u'stable_copper_nanoparticle_colloid',
u'average_particle_size', u'narrow_size_distribution', u'synthesis_route', u'ascorbic_acid', u'natural_vitamin_c',
u'vc', u'copper_salt_precursor', u'general_oxidation_process', u'newborn_nanoparticles', u'xrd', u'uv_vis', u'copper_nanoparticles',
u'excellent_antioxidant_ability', u'ascorbic_acid']]
"""
result = []
for np in TextBlob(text).noun_phrases:
if np not in entities:
# only consider phrases detected in entities (with frequency parameters)
continue
token = '_'.join(part for part in gensim.utils.tokenize(np))
if len(token) < 2 or token in stopwords:
# ignore very short phrases and stop words
continue
result.append(token)
return result
def tokenize_mixed(text, entities, stopwords=STOPWORDS):
"""A text tokenizer that retrieves entities ('noun phrases') first and simple words for the rest of the text.
    Parameters
    ----------
    text: str
        The text of a single document to tokenize.
    entities: set
        Noun phrases collected from the document collection (see find_entities).
    stopwords: iterable
        Collection of words to ignore as noise. Default is gensim's STOPWORDS.
>>> raw_data = read_input('{}/test_data_json_stream.json'.format(test_data_path), content_field="abstract")
>>> entities = find_entities(raw_data)
>>> id, text = next(iter(raw_data))
>>> tokenized_text = tokenize_mixed(text, entities)
"""
result = []
for np in TextBlob(text).noun_phrases:
if ' ' in np and np not in entities:
tokens = [word for word in gensim.utils.tokenize(np, lower=True) if word not in stopwords]
result.extend(tokens)
else:
token = '_'.join(part for part in gensim.utils.tokenize(np) if len(part) > 2)
if len(token) < 2 or token in stopwords:
# ignore very short phrases and stop words
continue
result.append(token)
return result
# Add additional methods here as necessary to expose them to outside consumers.
tokenizer_methods = {"simple": tokenize_simple,
"collocation": tokenize_collocation,
"entities": tokenize_entities,
"mixed": tokenize_mixed
}
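
# A minimal, hypothetical dispatch through the registry above:
#
# tokenize = tokenizer_methods["simple"]
# tokens = tokenize("Transition metal oxides are being considered ...")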
| bsd-3-clause | -605,126,569,415,801,200 | 45.471264 | 142 | 0.661885 | false |
cwaldbieser/txcas | txcas/couchdb_ticket_store.py | 1 | 29177 |
# Standard library
from __future__ import print_function
import datetime
from functools import partial
import json
import random
import string
import sys
from textwrap import dedent
import uuid
from xml.sax.saxutils import escape as xml_escape
# Application modules
from txcas.ca_trust import createCustomPolicyFactoryFromPEMs
from txcas.exceptions import (
CASError, InvalidTicket, InvalidService,
NotSSOService, InvalidTicketSpec)
from txcas.http import (
createNonVerifyingHTTPClient, createVerifyingHTTPClient)
from txcas.interface import (
ITicketStore, ITicketStoreFactory,
IServiceManagerAcceptor)
from txcas.settings import get_bool, export_settings_to_dict, load_settings
from txcas.urls import are_urls_equal
from txcas.utils import (
filter_args, get_missing_args, http_status_filter, unwrap_failures)
# External modules
from dateutil.parser import parse as parse_date
import treq
from twisted.internet import defer
from twisted.internet.task import LoopingCall
from twisted.plugin import IPlugin
from twisted.python import log
from twisted.web.http_headers import Headers
from zope.interface import implements
class CouchDBError(Exception):
pass
class CouchDBTicketStoreFactory(object):
implements(IPlugin, ITicketStoreFactory)
tag = "couchdb_ticket_store"
opt_help = dedent('''\
        A ticket store that manages all CAS tickets in an external
        CouchDB database.
Any tickets in the store when the CAS process is stopped
are retained when it is restarted.
Valid options include:
- couch_host
        - couch_port
- couch_db
- couch_user
- couch_passwd
- use_https
- verify_cert
- ca_cert
- lt_lifespan
- st_lifespan
- pt_lifespan
- tgt_lifespan
- pgt_lifespan
- ticket_size
''')
opt_usage = '''A colon-separated key=value list.'''
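    # A hypothetical example argstring, in the colon-separated key=value
    # format parsed by generateTicketStore() below:
    #
    #   "couch_host=127.0.0.1:couch_port=5984:couch_db=cas:use_https=1"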
def generateTicketStore(self, argstring=""):
scp = load_settings('cas', syspath='/etc/cas')
settings = export_settings_to_dict(scp)
ts_props = settings.get('CAS', {})
ts_settings = settings.get('CouchDB', {})
settings_xlate = {
'host': 'couch_host',
'port': 'couch_port',
'db': 'couch_db',
'user': 'couch_user',
'passwd': 'couch_passwd',
'https': 'use_https',
'debug': '_debug',
}
temp = {}
for k, v in ts_settings.iteritems():
k = settings_xlate.get(k, k)
temp[k] = v
ts_settings = temp
del temp
if argstring.strip() != "":
argdict = dict((x.split('=') for x in argstring.split(':')))
ts_settings.update(argdict)
missing = get_missing_args(
CouchDBTicketStore.__init__, ts_settings, ['self'])
if len(missing) > 0:
sys.stderr.write(
"[ERROR][CouchDBTicketStore] "
"Missing the following settings: %s" % ', '.join(missing))
sys.stderr.write('\n')
sys.exit(1)
props = (
'lt_lifespan', 'st_lifespan', 'pt_lifespan',
'tgt_lifespan', 'pgt_lifespan', 'ticket_size', '_debug')
ts_props = dict((prop, int(ts_props[prop])) for prop in props if prop in ts_props)
filter_args(CouchDBTicketStore.__init__, ts_settings, ['self'])
if 'couch_port' in ts_settings:
ts_settings['couch_port'] = int(ts_settings['couch_port'])
if 'use_https' in ts_settings:
ts_settings['use_https'] = get_bool(ts_settings['use_https'])
if 'verify_cert' in ts_settings:
ts_settings['verify_cert'] = get_bool(ts_settings['verify_cert'])
if '_debug' in ts_settings:
ts_settings['_debug'] = get_bool(ts_settings['_debug'])
obj = CouchDBTicketStore(**ts_settings)
for prop, value in ts_props.iteritems():
setattr(obj, prop, value)
buf = ["[CONFIG][CouchDBTicketStore] Settings:"]
d = dict(ts_settings)
d.update(ts_props)
for k in sorted(d.keys()):
v = d[k]
if k == 'couch_passwd':
v = '*******'
buf.append(" - %s: %s" % (k, v))
sys.stderr.write('\n'.join(buf))
sys.stderr.write('\n')
return obj
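# This store queries the CouchDB views "_design/views/_view/get_ticket" and
# "_design/views/_view/get_by_expires". A minimal sketch of a design document
# that would satisfy those queries (the exact map functions are an assumption,
# not part of this module):
#
# {
#   "_id": "_design/views",
#   "views": {
#     "get_ticket":     {"map": "function(doc) { emit(doc.ticket_id, doc); }"},
#     "get_by_expires": {"map": "function(doc) { emit(doc.expires, doc.ticket_id); }"}
#   }
# }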
class CouchDBTicketStore(object):
"""
A ticket store that uses an external CouchDB.
"""
implements(IPlugin, ITicketStore, IServiceManagerAcceptor)
lt_lifespan = 60*5
st_lifespan = 10
pt_lifespan = 10
tgt_lifespan = 60 * 60 * 24 * 2
pgt_lifespan = 60 * 60 * 2
charset = string.ascii_letters + string.digits + '-'
ticket_size = 256
_check_expired_interval = 60 * 1
service_manager = None
_expired_margin = 60*2
_expirationLoop = None
def __init__(self, couch_host, couch_port, couch_db,
couch_user, couch_passwd, use_https=True,
reactor=None, _debug=False, verify_cert=True,
ca_cert=None):
if reactor is None:
from twisted.internet import reactor
self.reactor = reactor
self._debug = _debug
self._expire_callback = (lambda ticket, data, explicit: None)
self._couch_host = couch_host
self._couch_port = couch_port
self._couch_db = couch_db
self._couch_user = couch_user
self._couch_passwd = couch_passwd
if verify_cert:
if ca_cert:
policy_factory = createCustomPolicyFactoryFromPEMs(ca_cert)
else:
policy_factory = None
self.httpClientFactory = partial(
createVerifyingHTTPClient, policy_factory=policy_factory)
else:
self.httpClientFactory = createNonVerifyingHTTPClient
if use_https:
self._scheme = 'https://'
else:
self._scheme = 'http://'
self.createExpirationChecker()
def createExpirationChecker(self):
if self._expirationLoop is not None:
self._expirationLoop.stop()
check_expired_interval = self.check_expired_interval
if check_expired_interval == 0:
self._expirationLoop = None
else:
expirationLoop = LoopingCall(self._clean_expired)
expirationLoop.clock = self.reactor
expirationLoop.start(self.check_expired_interval, now=False)
self._expirationLoop = expirationLoop
@property
def check_expired_interval(self):
return self._check_expired_interval
@check_expired_interval.setter
def check_expired_interval(self, value):
self._check_expired_interval = value
self.createExpirationChecker()
@defer.inlineCallbacks
def _clean_expired(self):
"""
Clean up any expired tickets.
"""
try:
url = '''%(scheme)s%(host)s:%(port)s/%(db)s/_design/views/_view/get_by_expires''' % {
'scheme': self._scheme,
'host': self._couch_host,
'port': self._couch_port,
'db': self._couch_db}
url = url.encode('utf-8')
earliest = datetime.datetime.today() - datetime.timedelta(seconds=self._expired_margin)
params = {
'descending': 'true',
'startkey': json.dumps(earliest.strftime("%Y-%m-%dT%H:%M:%S")),
}
self.debug("[DEBUG][CouchDB] _clean_expired(), url: %s" % url)
self.debug("[DEBUG][CouchDB] _clean_expired(), params: %s" % str(params))
httpClient = self.httpClientFactory(self.reactor)
response = yield httpClient.get(url,
params=params,
headers=Headers({'Accept': ['application/json']}),
auth=(self._couch_user, self._couch_passwd))
response = yield http_status_filter(response, [(200,200)], CouchDBError)
doc = yield treq.json_content(response)
rows = doc[u'rows']
if len(rows) > 0:
del_docs = []
for row in rows:
ticket_id = row[u'value']
try:
yield self._expireTicket(ticket_id)
except CouchDBError as ex:
log.msg("CouchDB error while attempting to delete expired tickets.")
log.err(ex)
except Exception as ex:
log.err(ex)
def _getServiceValidator(self):
service_mgr = self.service_manager
if service_mgr is None:
return (lambda x: True)
else:
return service_mgr.isValidService
def _getServiceSSOPredicate(self):
service_mgr = self.service_manager
if service_mgr is None:
return (lambda x: True)
else:
return service_mgr.isSSOService
def debug(self, msg):
if self._debug:
log.msg(msg)
def _validService(self, service):
def cb(result):
if not result:
return defer.fail(InvalidService(
"Service '%s' is not allowed by this CAS service." % service))
return defer.succeed(service)
return defer.maybeDeferred(self._getServiceValidator(), service).addCallback(cb)
def _isSSOService(self, service):
def cb(result):
if not result:
return defer.fail(NotSSOService(service))
return defer.maybeDeferred(self._getServiceSSOPredicate(), service).addCallback(cb)
def _generate(self, prefix):
r = prefix
size = self.ticket_size
while len(r) < size:
r += random.choice(self.charset)
return r
def _mkTicket(self, prefix, data, timeout):
"""
Create a ticket prefixed with C{prefix}
The ticket will expire after my class' C{lifespan} seconds.
@param prefix: String prefix for the token.
@param data: Data associated with this ticket (which will be returned
when L{_useTicket} is called).
"""
ticket = self._generate(prefix)
data['ticket_id'] = ticket
expires = datetime.datetime.today() + datetime.timedelta(seconds=timeout)
data[u'expires'] = expires.strftime('%Y-%m-%dT%H:%M:%S')
if 'pgts' in data:
data[u'pgts'] = list(data['pgts'])
url = '''%(scheme)s%(host)s:%(port)s/%(db)s''' % {
'scheme': self._scheme,
'host': self._couch_host,
'port': self._couch_port,
'db': self._couch_db}
url = url.encode('utf-8')
doc = json.dumps(data)
self.debug("[DEBUG][CouchDB] _mkTicket(): url: %s" % url)
self.debug("[DEBUG][CouchDB] _mkTicket(): doc: %s" % doc)
def return_ticket(result, ticket):
self.debug("[DEBUG][CouchDB] _mkTicket(), ticket: %s" % ticket)
return ticket
httpClient = self.httpClientFactory(self.reactor)
d = httpClient.post(url, data=doc, auth=(self._couch_user, self._couch_passwd),
headers=Headers({
'Accept': ['application/json'],
'Content-Type': ['application/json']}))
d.addCallback(http_status_filter, [(201,201)], CouchDBError)
d.addCallback(treq.content)
d.addCallback(return_ticket, ticket)
return d
@defer.inlineCallbacks
def _fetch_ticket(self, ticket):
"""
Fetch a ticket representation from CouchDB.
"""
url = '''%(scheme)s%(host)s:%(port)s/%(db)s/_design/views/_view/get_ticket''' % {
'scheme': self._scheme,
'host': self._couch_host,
'port': self._couch_port,
'db': self._couch_db}
url = url.encode('utf-8')
params = {'key': json.dumps(ticket.encode('utf-8'))}
self.debug("[DEBUG][CouchDB] _fetch_ticket(), url: %s" % url)
self.debug("[DEBUG][CouchDB] _fetch_ticket(), params: %s" % str(params))
httpClient = self.httpClientFactory(self.reactor)
response = yield httpClient.get(url,
params=params,
headers=Headers({'Accept': ['application/json']}),
auth=(self._couch_user, self._couch_passwd))
response = yield http_status_filter(response, [(200,200)], CouchDBError)
doc = yield treq.json_content(response)
rows = doc[u'rows']
if len(rows) > 0:
entry = rows[0][u'value']
entry[u'expires'] = parse_date(entry[u'expires'])
if u'pgts' in entry:
entry[u'pgts'] = set(entry[u'pgts'])
defer.returnValue(entry)
defer.returnValue(None)
@defer.inlineCallbacks
def _update_ticket(self, _id, _rev, data):
"""
Update a ticket in CouchDB.
"""
data[u'expires'] = data[u'expires'].strftime('%Y-%m-%dT%H:%M:%S')
if u'pgts' in data:
data[u'pgts'] = list(data[u'pgts'])
url = '''%(scheme)s%(host)s:%(port)s/%(db)s/%(docid)s''' % {
'scheme': self._scheme,
'host': self._couch_host,
'port': self._couch_port,
'db': self._couch_db,
'docid': _id}
url = url.encode('utf-8')
data['_rev'] = _rev.encode('utf-8')
try:
doc = json.dumps(data)
except Exception as ex:
self.debug("[DEBUG][CouchDB] Failed to serialze doc:\n%s" % (str(data)))
raise
httpClient = self.httpClientFactory(self.reactor)
self.debug('''[DEBUG][CouchDB] request_method="PUT" url="{0}"'''.format(url))
self.debug('''[DEBUG][CouchDB] document => {0}'''.format(data))
response = yield httpClient.put(
url,
data=doc,
auth=(self._couch_user, self._couch_passwd),
headers=Headers({
'Accept': ['application/json'],
'Content-Type': ['application/json']}))
response = yield http_status_filter(response, [(201,201)], CouchDBError)
doc = yield treq.json_content(response)
defer.returnValue(None)
@defer.inlineCallbacks
def _delete_ticket(self, _id, _rev):
"""
Delete a ticket from CouchDB.
"""
url = '''%(scheme)s%(host)s:%(port)s/%(db)s/%(docid)s''' % {
'scheme': self._scheme,
'host': self._couch_host,
'port': self._couch_port,
'db': self._couch_db,
'docid': _id}
url = url.encode('utf-8')
params = {'rev': _rev}
self.debug('[DEBUG][CouchDB] _delete_ticket(), url: %s' % url)
self.debug('[DEBUG][CouchDB] _delete_ticket(), params: %s' % str(params))
httpClient = self.httpClientFactory(self.reactor)
response = yield httpClient.delete(
url,
params=params,
auth=(self._couch_user, self._couch_passwd),
headers=Headers({'Accept': ['application/json']}))
response = yield http_status_filter(response, [(200,200)], CouchDBError)
resp_text = yield treq.content(response)
defer.returnValue(None)
@defer.inlineCallbacks
def _expireTicket(self, ticket):
"""
This function should only be called when a ticket is expired via
a timeout or indirectly (e.g. TGT expires so derived PGTs are expired).
"""
entry = yield self._fetch_ticket(ticket)
if entry is not None:
_id = entry['_id']
_rev = entry['_rev']
del entry[u'_id']
del entry[u'_rev']
yield self._delete_ticket(_id, _rev)
yield self._expire_callback(ticket, entry, False)
defer.returnValue(None)
@defer.inlineCallbacks
def _useTicket(self, ticket, _consume=True):
"""
Consume a ticket, producing the data that was associated with the ticket
when it was created.
@raise InvalidTicket: If the ticket doesn't exist or is no longer valid.
"""
entry = yield self._fetch_ticket(ticket)
if entry is not None:
_id = entry[u'_id']
_rev = entry[u'_rev']
expires = entry[u'expires']
now = datetime.datetime.today()
if now >= expires:
raise InvalidTicket("Ticket has expired.")
del entry[u'_id']
del entry[u'_rev']
if _consume:
yield self._delete_ticket(_id, _rev)
yield self._expire_callback(ticket, entry, True)
else:
                if ticket.startswith(u'PT-'):
                    timeout = self.pt_lifespan
                elif ticket.startswith(u'ST-'):
                    timeout = self.st_lifespan
elif ticket.startswith(u'LT-'):
timeout = self.lt_lifespan
elif ticket.startswith(u'PGT-'):
timeout = self.pgt_lifespan
elif ticket.startswith(u'TGC-'):
timeout = self.tgt_lifespan
else:
timeout = 10
now = datetime.datetime.today()
expires = now + datetime.timedelta(seconds=timeout)
entry[u'expires'] = expires
yield self._update_ticket(_id, _rev, entry)
defer.returnValue(entry)
else:
raise InvalidTicket("Ticket '%s' does not exist." % ticket)
@defer.inlineCallbacks
def _informTGTOfService(self, st, service, tgt):
"""
Record in the TGT that a service has requested an ST.
"""
entry = yield self._fetch_ticket(tgt)
if entry is None:
raise InvalidTicket("Ticket '%s' does not exist." % tgt)
_id = entry[u'_id']
_rev = entry[u'_rev']
del entry[u'_id']
del entry[u'_rev']
services = entry.setdefault('services', {})
services[service] = st
yield self._update_ticket(_id, _rev, entry)
defer.returnValue(st)
@defer.inlineCallbacks
def _informTGTOfPGT(self, pgt, tgt):
"""
        Record in the TGT that a PGT has been created from it.
"""
if not pgt.startswith("PGT-"):
raise InvalidTicket("PGT '%s' is not valid." % pgt)
if not tgt.startswith("TGC-"):
raise InvalidTicket("TGT '%s' is not valid." % tgt)
entry = yield self._fetch_ticket(tgt)
if entry is None:
raise InvalidTicket("Ticket '%s' does not exist." % tgt)
_id = entry[u'_id']
_rev = entry[u'_rev']
del entry[u'_id']
del entry[u'_rev']
pgts = entry.setdefault('pgts', set([]))
pgts.add(pgt)
yield self._update_ticket(_id, _rev, entry)
defer.returnValue(pgt)
def mkLoginTicket(self, service):
"""
Create a login ticket.
"""
d = self._validService(service)
def cb(_):
return self._mkTicket('LT-', {
'service': service,
}, timeout=self.lt_lifespan)
return d.addCallback(cb)
def useLoginTicket(self, ticket, service):
"""
Use a login ticket.
"""
if not ticket.startswith("LT-"):
return defer.fail(InvalidTicket())
def doit(_):
d = self._useTicket(ticket)
def cb(data):
recorded_service = data[u'service']
if not are_urls_equal(recorded_service, service):
return defer.fail(InvalidService(
"Issued service '%s' does not match presented service '%s'." % (
recorded_service, service)))
return d.addCallback(cb)
return self._validService(service).addCallback(doit)
@defer.inlineCallbacks
def mkServiceTicket(self, service, tgt_id, primaryCredentials):
"""
Create a service ticket
"""
if not tgt_id.startswith("TGC-"):
raise InvalidTicket()
entry = yield self._fetch_ticket(tgt_id)
if entry is None:
raise InvalidTicket("Invalid TGT '%s'." % tgt_id)
del entry[u'_id']
del entry[u'_rev']
tgt = entry
yield self._validService(service)
ticket = yield self._mkTicket('ST-', {
'avatar_id': tgt['avatar_id'],
'service': service,
'primary_credentials': primaryCredentials,
'tgt': tgt_id,
}, self.st_lifespan)
#NOTE: The TGT data has just been fetched, and we are going to fetch it
# *again* in the call to `_informTGTOfService`. Seems like we should be
# able to skip that 2nd fetch for efficiency.
yield self._informTGTOfService(ticket, service, tgt_id)
defer.returnValue(ticket)
def useServiceTicket(self, ticket, service, requirePrimaryCredentials=False):
"""
Get the data associated with a service ticket.
"""
if not ticket.startswith("ST-"):
return defer.fail(InvalidTicketSpec())
return self._useServiceOrProxyTicket(ticket, service, requirePrimaryCredentials)
@defer.inlineCallbacks
def mkProxyTicket(self, service, pgt):
"""
Create a proxy ticket
"""
if not pgt.startswith("PGT-"):
raise InvalidTicket()
pgt_info = yield self._fetch_ticket(pgt)
if pgt_info is None:
raise InvalidTicket("PGT '%s' is invalid." % pgt)
pgturl = pgt_info['pgturl']
try:
tgt = pgt_info[u'tgt']
except KeyError:
raise InvalidTicket("PGT '%s' is invalid." % pgt)
yield self._validService(service)
pt = yield self._mkTicket('PT-', {
'avatar_id': pgt_info[u'avatar_id'],
'service': service,
'primary_credentials': False,
'pgturl': pgturl,
'pgt': pgt,
'tgt': tgt,
'proxy_chain': pgt_info[u'proxy_chain'],
}, self.pt_lifespan)
yield self._informTGTOfService(pt, service, tgt)
defer.returnValue(pt)
def useServiceOrProxyTicket(self, ticket, service, requirePrimaryCredentials=False):
"""
        Get the data associated with a service or proxy ticket.
"""
return self._useServiceOrProxyTicket(ticket, service, requirePrimaryCredentials, True)
def _useServiceOrProxyTicket(self, ticket, service, requirePrimaryCredentials=False, _allow_pt=False):
"""
Get the data associated with a service or proxy ticket.
"""
if not ticket.startswith("ST-"):
if not ticket.startswith("PT-") and _allow_pt:
return defer.fail(InvalidTicket())
def doit(_):
d = self._useTicket(ticket)
def cb(data):
if not are_urls_equal(data[u'service'], service):
log.msg("[WARNING] ST service '{0}' != /serviceValidate service '{1}'".format(
data[u'service'], service))
return defer.fail(InvalidService(
"Issued service does not match validation service."))
if requirePrimaryCredentials and data['primary_credentials'] == False:
return defer.fail(InvalidTicket("This ticket was not issued in response to primary credentials."))
return data
return d.addCallback(cb)
return self._validService(service).addCallback(doit)
@defer.inlineCallbacks
def mkProxyGrantingTicket(self, service, ticket, tgt, pgturl, proxy_chain=None):
"""
Create Proxy Granting Ticket
"""
if not (ticket.startswith("ST-") or ticket.startswith("PT-")):
raise InvalidTicket()
tgt_info = yield self._fetch_ticket(tgt)
if tgt_info is None:
raise InvalidTicket("TGT '%s' is invalid." % tgt)
del tgt_info[u'_id']
del tgt_info[u'_rev']
yield self._validService(service)
charset = self.charset
iou = self._generate('PGTIOU-')
data = {
'avatar_id': tgt_info['avatar_id'],
'service': service,
'st_or_pt': ticket,
'iou': iou,
'tgt': tgt,
'pgturl': pgturl,
}
if proxy_chain is not None:
new_proxy_chain = list(proxy_chain)
new_proxy_chain.append(pgturl)
else:
new_proxy_chain = [pgturl]
data[u'proxy_chain'] = new_proxy_chain
pgt = yield self._mkTicket('PGT-', data, timeout=self.pgt_lifespan)
# NOTE: We just fetched the TGC above and as soon as we call
# `_informTGTOfPGT`, we will immediately fetch the TGT again.
# Seems like there ought to be a way to use the just-fetched TGC.
yield self._informTGTOfPGT(pgt, tgt)
defer.returnValue({'iou': iou, 'pgt': pgt})
def mkTicketGrantingCookie(self, avatar_id):
"""
Create a ticket to be used in a cookie.
"""
return self._mkTicket('TGC-', {'avatar_id': avatar_id}, timeout=self.tgt_lifespan)
def useTicketGrantingCookie(self, ticket, service):
"""
Get the user associated with this ticket.
"""
def use_ticket_cb(_):
return self._useTicket(ticket, _consume=False)
if service != "":
return self._isSSOService(service).addCallback(use_ticket_cb)
else:
return use_ticket_cb(None)
def expireTGT(self, ticket):
"""
Expire the TGT identified by 'ticket'.
"""
if not ticket.startswith("TGC-"):
return defer.fail(InvalidTicket())
d = self._useTicket(ticket)
def cb(data):
"""
Expire associated PGTs.
Perform SLO.
"""
#SLO
services = data.get('services', {})
self.reactor.callLater(0.0, self._notifyServicesSLO, services)
#PGTs
pgts = data.get(u'pgts', {})
for pgt in pgts:
self._expireTicket(pgt)
return None
def eb(failure):
failure.trap(InvalidTicket)
return d.addCallback(cb).addErrback(eb)
_samlLogoutTemplate = dedent("""\
<samlp:LogoutRequest
xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol"
xmlns:saml="urn:oasis:names:tc:SAML:2.0:assertion"
ID="%(identifier)s"
Version="2.0"
IssueInstant="%(issue_instant)s">
<saml:NameID>@NOT_USED@</saml:NameID>
<samlp:SessionIndex>%(service_ticket)s</samlp:SessionIndex>
</samlp:LogoutRequest>
""")
def _notifyServicesSLO(self, services):
template = self._samlLogoutTemplate
def logerr(err, service):
log.msg("Error sending SLO to service '%s'." % service)
log.err(err)
errs = unwrap_failures(err)
for error in errs:
log.err(error)
return err
dlist = []
for service, st in services.iteritems():
dt = datetime.datetime.utcnow()
issue_instant = dt.strftime("%Y-%m-%dT%H:%M:%S")
identifier = str(uuid.uuid4())
data = template % {
'identifier': xml_escape(identifier),
'issue_instant': xml_escape(issue_instant),
'service_ticket': xml_escape(st)
}
httpClient = self.httpClientFactory(self.reactor)
d = httpClient.post(
service.encode('utf-8'),
headers=Headers({'Content-Type': ['application/xml']}),
data=data.encode('utf-8'),
timeout=30).addCallback(
treq.content).addErrback(
logerr, service)
dlist.append(d)
return defer.DeferredList(dlist, consumeErrors=True)
def register_ticket_expiration_callback(self, callback):
"""
Register a function to be called when a ticket is expired.
The function should take 3 arguments, (ticket, data, explicit).
`ticket` is the ticket ID, `data` is a dict of the ticket data,
and `explicit` is a boolean that indicates whether the ticket
was explicitly expired (e.g. /logout, ST/PT validation) or
implicitly expired (e.g. timeout or parent ticket expired).
"""
self._expire_callback = callback
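    # A minimal example callback (hypothetical names; `store` is an instance
    # of this ticket store):
    #
    #     def on_expire(ticket, data, explicit):
    #         log.msg("ticket %s expired (explicit=%s) for avatar %s" % (
    #             ticket, explicit, data.get('avatar_id')))
    #
    #     store.register_ticket_expiration_callback(on_expire)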
| gpl-3.0 | -1,356,178,428,908,347,000 | 37.040417 | 118 | 0.546595 | false |
serge-sans-paille/pythran | pythran/tests/scikit-image/_hessian_det_appx.py | 1 | 3728 | import numpy as np
def _clip(x, low, high):
"""Clips coordinate between high and low.
This method was created so that `hessian_det_appx` does not have to make
a Python call.
Parameters
----------
x : int
Coordinate to be clipped.
low : int
The lower bound.
high : int
The higher bound.
Returns
-------
x : int
`x` clipped between `high` and `low`.
"""
if x > high:
return high
if x < low:
return low
return x
def _integ(img, r, c, rl, cl):
"""Integrate over the integral image in the given window
This method was created so that `hessian_det_appx` does not have to make
a Python call.
Parameters
----------
img : array
The integral image over which to integrate.
r : int
The row number of the top left corner.
c : int
The column number of the top left corner.
rl : int
The number of rows over which to integrate.
cl : int
The number of columns over which to integrate.
Returns
-------
ans : int
The integral over the given window.
"""
r = _clip(r, 0, img.shape[0] - 1)
c = _clip(c, 0, img.shape[1] - 1)
r2 = _clip(r + rl, 0, img.shape[0] - 1)
c2 = _clip(c + cl, 0, img.shape[1] - 1)
ans = img[r, c] + img[r2, c2] - img[r, c2] - img[r2, c]
return max(0, ans)
# pythran export _hessian_matrix_det(float64[:,:], float64)
def _hessian_matrix_det(img, sigma):
"""Computes the approximate Hessian Determinant over an image.
This method uses box filters over integral images to compute the
approximate Hessian Determinant as described in [1]_.
Parameters
----------
img : array
The integral image over which to compute Hessian Determinant.
sigma : float
Standard deviation used for the Gaussian kernel, used for the Hessian
matrix
Returns
-------
out : array
The array of the Determinant of Hessians.
References
----------
.. [1] Herbert Bay, Andreas Ess, Tinne Tuytelaars, Luc Van Gool,
"SURF: Speeded Up Robust Features"
ftp://ftp.vision.ee.ethz.ch/publications/articles/eth_biwi_00517.pdf
Notes
-----
    The running time of this method only depends on the size of the image. It
    is independent of `sigma`, as one would expect. The downside is that the
    result for `sigma` less than `3` is not accurate, i.e., not similar to
    the result obtained if someone computed the Hessian and took its
    determinant.
"""
size = int(3 * sigma)
height, width = img.shape
s2 = (size - 1) // 2
s3 = size // 3
w = size
out = np.zeros_like(img, dtype=np.double)
w_i = 1.0 / size / size
if size % 2 == 0:
size += 1
for r in range(height):
for c in range(width):
tl = _integ(img, r - s3, c - s3, s3, s3) # top left
br = _integ(img, r + 1, c + 1, s3, s3) # bottom right
bl = _integ(img, r - s3, c + 1, s3, s3) # bottom left
tr = _integ(img, r + 1, c - s3, s3, s3) # top right
dxy = bl + tr - tl - br
dxy = -dxy * w_i
# middle box
mid = _integ(img, r - s3 + 1, c - s2, 2 * s3 - 1, w)
# sides
side = _integ(img, r - s3 + 1, c - s3 // 2, 2 * s3 - 1, s3)
dxx = mid - 3 * side
dxx = -dxx * w_i
mid = _integ(img, r - s2, c - s3 + 1, w, 2 * s3 - 1)
side = _integ(img, r - s3 // 2, c - s3 + 1, s3, 2 * s3 - 1)
dyy = mid - 3 * side
dyy = -dyy * w_i
out[r, c] = (dxx * dyy - 0.81 * (dxy * dxy))
return out
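if __name__ == '__main__':
    # Minimal smoke test (assumed inputs; not part of the pythran export).
    # The function expects an *integral image*, which can be built from a
    # plain image with cumulative sums along both axes.
    rng = np.random.RandomState(0)
    image = rng.rand(64, 64)
    integral = image.cumsum(axis=0).cumsum(axis=1)
    det = _hessian_matrix_det(integral, 3.0)
    print(det.shape)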
| bsd-3-clause | 3,400,070,011,310,668,000 | 25.820144 | 79 | 0.537554 | false |
hbradleyiii/ww | ww/main.py | 1 | 1722 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# name: main.py
# author: Harold Bradley III
# email: harold@bradleystudio.net
# date: 12/11/2015
#
# description: A program for managing websites
#
from __future__ import absolute_import, print_function
try:
from ext_pylib.prompt import prompt, prompt_str, warn_prompt
except ImportError:
raise ImportError('ext_pylib must be installed to run ww')
import platform
import sys
from ww import Website, WebsiteDomain, Vhost, WPWebsite
# SysError and UnknownError are raised below but were never defined or
# imported; minimal definitions are assumed here so the script can run.
class SysError(Exception):
    """Raised when ww is run on an unsupported platform."""
class UnknownError(Exception):
    """Raised when the script reaches an unexpected internal state."""
__author__ = 'Harold Bradley III'
__copyright__ = 'Copyright (c) 2015-2016 Harold Bradley III'
__license__ = 'MIT'
def display_help():
"""Displays script help."""
print('Help not yet implemented.')
def main():
"""Main entry point for the script."""
if platform.system() != 'Linux':
raise SysError('ERROR: ww cannot be run from ' + platform.system() + '.')
try:
script = sys.argv.pop(0)
except IndexError: # Unknown Error
raise UnknownError('ERROR: sys.argv was not set in main()')
try:
command = sys.argv.pop(0)
except IndexError: # No arguments given
        display_help()  # If no arguments are given, display help
return
if command not in ['install', 'remove', 'pack', 'unpack', 'verify', 'repair']:
print('ERROR: Command "' + command + '" not understood.')
return 1
wp = False
if sys.argv and sys.argv[0] == 'wp':
sys.argv.pop(0)
wp = True
domain = ''
if sys.argv:
domain = sys.argv.pop(0)
website = WPWebsite(domain) if wp else Website(domain)
getattr(website, command)()
if __name__ == '__main__':
sys.exit(main())
| mit | 5,718,139,895,927,511,000 | 23.956522 | 82 | 0.608595 | false |
mrtumnus/scrape-tedtalks | download_tedtalk.py | 1 | 1586 | # File: download_tedtalk.py
# Author: E. Partridge
# Date: 8 August 2012
# Description:
#    This script parses the TED Talk audio feed and downloads
#    all audio files into the current working directory.
#    Files are prepended with the
# date for convenience.
#
# Note: This has only been tested on Windows 7 64-bit, with Python 2.7.2.5
# Note2: TED Talk audio files contain ID3v2.4 tags, which are not supported
# natively by Windows. I used foobar2000 to convert the tags to ID3v2.3,
# which Windows does support. To do this, open the MP3 files in
# foobar2000, right click and select Tagging > MP3 Tag Types... Check
# "Override ID3v2 revision:" and select the ID3v2.3 radio button.
# After that, I was able to view metadata in Windows Explorer and
# Windows Media Player.
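# Note3: The same ID3v2.4 -> ID3v2.3 conversion can be scripted; a minimal
# sketch assuming the third-party mutagen library is installed:
#
#     from mutagen.id3 import ID3
#     tags = ID3(file_name)
#     tags.update_to_v23()
#     tags.save(v2_version=3)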
import urllib
import feedparser
import time
tedtalk_rss_url = 'http://feeds.feedburner.com/TEDTalks_audio'
tedtalk_feed = feedparser.parse(tedtalk_rss_url)
def GetFeedContent(entry):
content_url = entry.enclosures[0].href
file_name = content_url.split('/')[-1]
file_date = time.strptime(entry.published[5:16], '%d %b %Y')
date_str = '{:04}-{:02}-{:02}'.format(file_date.tm_year, file_date.tm_mon, file_date.tm_mday)
file_name = date_str + ' ' + file_name
try:
with open(file_name) as f:
print('File exists: ' + file_name)
except IOError as e:
print('Downloading: ' + file_name)
urllib.urlretrieve(content_url, file_name)
return
for entry in tedtalk_feed.entries:
GetFeedContent(entry) | gpl-3.0 | -5,137,724,765,449,738,000 | 37.7 | 94 | 0.703026 | false |
mgraffg/simplegp | examples/simplify.py | 1 | 2421 | from SimpleGP import GP
import numpy as np
seed = 0 # if len(sys.argv) == 1 else int(sys.argv[1])
x = np.linspace(0, 1, 100)
pol = np.array([0.2, -0.3, 0.2])
X = np.vstack((x**2, x, np.ones(x.shape[0])))
y = (X.T * pol).sum(axis=1)
gp = GP(popsize=10,
generations=100000,
verbose=True,
verbose_nind=1000,
min_length=1,
do_simplify=True,
func=["+", "-", "*", "/", 'abs', 'exp', 'sqrt',
'sin', 'cos', 'sigmoid', 'if', 'max', 'min',
'ln', 'sq'],
min_depth=0, fname_best='regression.npy',
seed=seed, nrandom=100, pxo=0.2, pgrow=0.5, walltime=None)
gp.create_random_constants()
x = x[:, np.newaxis]
gp.train(x, y)
gp.create_population()
nvar = gp._nop.shape[0]
ind = np.array([2, 3, 0, 0, nvar, nvar, 1, nvar, nvar,
0, 1, nvar, nvar, 2, nvar, nvar, 1, 3,
nvar, nvar, 3, nvar, nvar], dtype=np.int)
print gp.print_infix(ind)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
ind = np.array([1, 0, 3, nvar, nvar, 1, nvar, nvar,
3, 2, nvar, nvar, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
print ind2
ind = np.array([13, 5, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind, constants=gp._ind_generated_c)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
ind = np.array([5, 13, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind, constants=gp._ind_generated_c)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
ind = np.array([5, 13, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind, constants=gp._ind_generated_c)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
gp._p[0] = np.array([0, 2, nvar, nvar+2, nvar+1], dtype=np.int)
gp._p_constants[0] = np.array([0, 1.4])
print gp.print_infix(0)
gp.simplify(0)
print gp.print_infix(0) == "(X0 * 1.4)"
gp._p[0] = np.array([0, nvar+1, 2, nvar, nvar+2], dtype=np.int)
gp._p_constants[0] = np.array([0, 1.4])
print gp.print_infix(0)
gp.simplify(0)
print gp.print_infix(0) == "(X0 * 1.4)"
gp._p[0] = np.array([1, 0, 2, nvar, nvar+2, nvar+1,
2, nvar, nvar+2], dtype=np.int)
gp._p_constants[0] = np.array([0, 1.4])
print gp.print_infix(0)
gp.simplify(0)
print gp.print_infix(0)
| apache-2.0 | 7,469,544,312,858,264,000 | 31.716216 | 66 | 0.608013 | false |
Dev-Cloud-Platform/Dev-Cloud | dev_cloud/cc1/src/clm/utils/decorators.py | 1 | 10248 | # -*- coding: utf-8 -*-
# @COPYRIGHT_begin
#
# Copyright [2010-2014] Institute of Nuclear Physics PAN, Krakow, Poland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @COPYRIGHT_end
"""@package src.clm.utils.decorators
This module contains decorators for CLM view functions targeted at specific
CLM role actors (all of which call src.clm.utils.decorators.genericlog()).
@par Actor decorators
- src.clm.utils.decorators.guest_log
- src.clm.utils.decorators.user_log
- src.clm.utils.decorators.admin_clm_log
All those decorators call src.clm.utils.decorators.genericlog().
By default those decorators call src.clm.utils.decorators.genericlog
with logging disabled. You can enable it by giving kwarg \c log=True ,
when decorating, eg.:
@code
@admin_clm_log(log=True)
def get_by_id(cm_id, caller_id, id):
pass
@endcode
@author Tomasz Sośnicki <tom.sosnicki@gmail.com>
"""
from clm.utils.cm import CM
from clm.utils import log
from clm.utils.exception import CLMException
from clm.models.user import User
from common.signature import Signature
from common import response
from common.states import user_active_states
from functools import wraps
import json
from django.http import HttpResponse
from django.db import transaction
from common.utils import json_convert
# Set of functions decorated by actor decorators
# (src.clm.utils.decorators.guest_log(), src.clm.utils.decorators.user_log(),
# src.clm.utils.decorators.admin_clm_log())
global decorated_functions
decorated_functions = set([])
def guest_log(*arg, **kw):
"""
    Decorator for functions requiring only \b guest's privileges.
src.clm.utils.decorators.genericlog() is called with parameters:
- \c is_user=False
- \c is_clm_superuser=False
- \c is_cm_superuser=False
@par Decorated function's declaration
@code
@guest_log[(log=<False|True>)]
function (**kw)
@endcode
@par Decorated function's call
@code
function (**kw)
@endcode
"""
def logwrapper(fun):
@wraps(fun)
def wrapper(*args, **kwargs):
return genericlog(kw.get('log', False), kw.get('pack', True), False, False, False, fun, args, kwargs)
decorated_functions.add(wrapper)
return wrapper
return logwrapper
def user_log(*arg, **kw):
"""
    Decorator for functions requiring a logged-in \b user's privileges.
src.clm.utils.decorators.genericlog() is called with parameters:
- \c is_user=True
- \c is_clm_superuser=False
- \c is_cm_superuser=False
@par Decorated function's declaration
@code
@user_log[(log=<False|True>)]
function (cm_id, caller_id, **kw)
@endcode
@par Decorated function's call
@code
function (cm_id=<cm_id>, login=<login>, password=<password>, **kw)
@endcode
"""
def logwrapper(fun):
@wraps(fun)
def wrapper(*args, **kwargs):
return genericlog(kw.get('log', False), kw.get('pack', True), True, False, False, fun, args, kwargs)
decorated_functions.add(wrapper)
return wrapper
return logwrapper
def admin_cm_log(*arg, **kw):
"""
    Decorator for functions requiring \b admin_cm's privileges.
src.clm.utils.decorators.genericlog is called with parameters:
- \c is_user=True
- \c is_clm_superuser=False
- \c is_cm_superuser=True
@par Decorated function's declaration
@code
@admin_clm_log[(log=<False|True>)]
function (cm_id, caller_id, **kw)
@endcode
@par Decorated function's call
@code
function (cm_id=<cm_id>, login=<login>, password=<password>, **kw)
@endcode
    \c password argument is removed by \c src.clm.utils.decorators.genericlog(),
so it doesn't appear in formal parameters of the function.
"""
def logwrapper(fun):
@wraps(fun)
def wrapper(*args, **kwargs):
return genericlog(kw.get('log', False), kw.get('pack', True), True, False, True, fun, args, kwargs)
decorated_functions.add(wrapper)
return wrapper
return logwrapper
def admin_clm_log(*arg, **kw):
"""
    Decorator for functions requiring \b admin_clm's privileges.
src.clm.utils.decorators.genericlog is called with parameters:
- \c is_user=True
- \c is_clm_superuser=True
- \c is_cm_superuser=False
@par Decorated function's declaration
@code
@admin_clm_log[(log=<False|True>)]
function (cm_id, caller_id, *args, **kw)
@endcode
@par Decorated function's call
@code
function (cm_id, login, password, *arg, **kw)
@endcode
    \c password argument is removed by \c src.clm.utils.decorators.genericlog(),
so it doesn't appear in formal parameters of the function.
"""
def logwrapper(fun):
@wraps(fun)
def wrapper(*args, **kwargs):
return genericlog(kw.get('log', False), kw.get('pack', True), True, True, False, fun, args, kwargs)
decorated_functions.add(wrapper)
return wrapper
return logwrapper
def auth(is_user, is_clm_superuser, data):
if is_user:
login = data.pop('login')
password = data.get('password')
if password:
del data['password']
try:
user = User.objects.get(login=login)
except User.DoesNotExist:
raise CLMException('user_get')
if 'Signature' in data.keys():
if not Signature.checkSignature(user.password, data.pop('Signature'), data['parameters']):
raise CLMException('user_get')
del data['parameters']
elif user.password != password:
raise CLMException('user_get')
data['caller_id'] = user.id
if user.is_active != user_active_states['ok']:
raise CLMException('user_inactive')
if is_clm_superuser and not user.is_superuser:
raise CLMException('user_permission')
data['cm_id'] = data.pop('cm_id', None)
if not data['cm_id']:
if user.default_cluster_id is not None:
data['cm_id'] = user.default_cluster_id
return user.id
else:
return 0
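# A minimal sketch of the two credential shapes auth() accepts (field values
# are hypothetical):
#
#     {"login": "alice", "password": "s3cret", "cm_id": 1, ...}
#     {"login": "alice", "Signature": "<hmac>", "parameters": "...", "cm_id": 1, ...}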
def genericlog(log_enabled, pack_resp, is_user, is_clm_superuser, is_cm_superuser, fun, args, kwargs):
"""
Generic log is called by actor decorators defined in src.clm.utils.decorators :
- src.clm.utils.decorators.guest_log
- src.clm.utils.decorators.user_log
- src.clm.utils.decorators.admin_cm_log
- src.clm.utils.decorators.admin_clm_log
It calls decorated functions, additionally performing several tasks.
    Genericlog performs:
    -# <i>if decorated function requires user or admin privileges</i>: <b>authorization</b>;
-# <b>execution</b> of the decorated function;
-# <b>debug log</b> of the arguments <i>depending on \c log_enabled and function's success</i>;
-# <i>if exception is thrown</i>: <b>general exception log</b>.
@returns{dict} response; fields:
@dictkey{status,string} 'ok', if succeeded
@dictkey{data,dict} response data
"""
# ===========================================================================
    # AUTHORIZATION
# ===========================================================================
name = '%s.%s' % (fun.__module__.replace('clm.views.', ''), fun.__name__)
request = args[0]
data = json.loads(request.body)
# ===========================================================================
    # LOG ARGUMENTS
# ===========================================================================
gen_exception = False
with transaction.commit_manually():
try:
# Execute function
user_id = auth(is_user, is_clm_superuser, data)
resp = fun(**data)
if pack_resp and not hasattr(fun,
                                         'packed'):  # if the function is decorated by cm_request, the 'packed' attribute is set and the response is already packed by the CM
resp = response('ok', resp)
transaction.commit()
except CLMException, e:
transaction.rollback()
user_id = 0
resp = e.response
except Exception, e:
transaction.rollback()
gen_exception = True
user_id = 0
resp = response('clm_error', str(e))
if log_enabled or resp['status'] != 'ok':
log.debug(user_id, '=' * 100)
log.debug(user_id, 'Function: %s' % name)
log.debug(user_id, 'ARGS:\n%s' % json.dumps(data, indent=4))
if gen_exception:
log.exception(user_id, 'General exception')
        log.debug(user_id, 'Response: %s' % (resp or 'None'))
return HttpResponse(json.dumps(resp, default=json_convert))
def cm_request(fun):
"""
Decorator for CM views functions that:
- either are fully transparent and just return CM response,
- or propagate request to CM and further postprocess its response.
Decorated function ought to be defined like:
@par Decorated function's declaration
@code
@cm_request
def function (cm_response, <kwargs>):
# postprocess cm_response
return cm_response
@endcode
@par Decorated function's call
@code
function (cm_id, <kwargs>) # `cm_id` is keyword arg as well, but it's required
@endcode
"""
url = r"%s/%s/" % (fun.__module__.replace("clm.views.", "").replace(".", "/"), fun.__name__)
@wraps(fun)
def wrapper(**data):
log.debug(0, "Forward request to CM: %s" % url)
cm_response = CM(data.pop('cm_id')).send_request(url, **data)
fun.packed = True # mark function response to not be packed by genericlog
return fun(cm_response, **data)
return wrapper
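# A fully transparent proxy view (hypothetical example) only needs to return
# the CM response unchanged:
#
#     @cm_request
#     def get_template(cm_response, **data):
#         return cm_response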
| apache-2.0 | 987,290,668,116,218,800 | 29.771772 | 160 | 0.620084 | false |
scott-maddox/openbandparams | src/openbandparams/examples/advanced/GaInAsSb_on_GaSb/Plot_Strained_Band_Offset_vs_Composition_of_Quaternary3.py | 1 | 2475 | #
# Copyright (c) 2013-2014, Scott J Maddox
#
# This file is part of openbandparams.
#
# openbandparams is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# openbandparams is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with openbandparams. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
# Make sure we import the local openbandparams version
import os
import sys
sys.path.insert(0,
os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../..')))
from openbandparams import *
import matplotlib.pyplot as plt
import numpy
# Type 3 Quaternary
alloy = GaInAsSb
# calculate the data
T = 300 # K
N = 100
xs = numpy.linspace(0, 1, N)
ys = numpy.linspace(0, 1, N)
X, Y = numpy.meshgrid(xs, ys)
Z = numpy.empty(shape=(N, N), dtype=numpy.double)
W = numpy.empty(shape=(N, N), dtype=numpy.double)
for i in xrange(N):
for j in xrange(N):
strained = alloy(x=X[i, j], y=Y[i, j]).strained_001(GaSb)
strain = strained.strain_out_of_plane(T=T)
if not (0. <= strain <= 0.03):
Z[i, j] = numpy.nan
W[i, j] = numpy.nan
else:
Z[i, j] = strained.VBO_hh(T=T) - GaSb.VBO()
W[i, j] = GaSb.CBO() - strained.CBO(T=T)
# plot it
fig = plt.figure()
CS = plt.contour(1-X, 1-Y, Z, 14, colors='r')
plt.clabel(CS, inline=True, fontsize=10)
CS2 = plt.contour(1-X, 1-Y, W, 12, colors='b')
plt.clabel(CS2, inline=True, fontsize=10)
plt.title('$%s/GaSb$ from 0 to 3%% strain (T = %.0f K)' % (alloy.latex(), T))
plt.xlabel('%s fraction' % alloy.elements[1])
plt.ylabel('%s fraction' % alloy.elements[3])
plt.plot([numpy.nan], [numpy.nan], 'b-', label='Conduction Band Offset')
plt.plot([numpy.nan], [numpy.nan], 'r-', label='Valance Band Offset')
plt.legend(loc='lower left')
if __name__ == '__main__':
import sys
if len(sys.argv) > 1:
output_filename = sys.argv[1]
plt.savefig(output_filename)
else:
plt.show() | agpl-3.0 | 8,786,756,771,074,003,000 | 33.873239 | 77 | 0.628283 | false |
mirestrepo/voxels-at-lems | boxm/fill_internal_nodes.py | 1 | 1379 | import boxm_batch;
import os;
import optparse;
import sys;
boxm_batch.register_processes();
boxm_batch.register_datatypes();
class dbvalue:
def __init__(self, index, type):
self.id = index # unsigned integer
self.type = type # string
print("Filling internal nodes");
#Parse inputs
parser = optparse.OptionParser(description='Fill Internal Nodes');
parser.add_option('--model_dir', action="store", dest="model_dir", type="string", default="");
parser.add_option('--model_name', action="store", dest="model_name", type="string",default="");
options, args = parser.parse_args()
model_dir = options.model_dir;
model_name = options.model_name;
if len(model_dir) == 0:
print "Missing Model Dir"
sys.exit(-1);
if len(model_name) == 0:
print "Missing Model Name"
sys.exit(-1);
print("Creating a Scene");
boxm_batch.init_process("boxmCreateSceneProcess");
boxm_batch.set_input_string(0, model_dir +"/" + str(model_name) + ".xml");
boxm_batch.run_process();
(scene_id, scene_type) = boxm_batch.commit_output(0);
scene = dbvalue(scene_id, scene_type);
print("*************************************");
print("Filling internal nodes");
boxm_batch.init_process("boxm_fill_internal_cells_process");
boxm_batch.set_input_from_db(0, scene);
boxm_batch.run_process();
(scene_id, scene_type) = boxm_batch.commit_output(0);
filled_scene = dbvalue(scene_id, scene_type);
| bsd-2-clause | -807,465,746,511,493,400 | 24.537037 | 95 | 0.677302 | false |
GNOME/gom | examples/gom.py | 1 | 2292 | #!/usr/bin/python3
from gi.types import GObjectMeta
from gi.repository import GLib
from gi.repository import GObject
from gi.repository import Gom
# Need a metaclass until we get something like _gclass_init_
# https://bugzilla.gnome.org/show_bug.cgi?id=701843
class ItemResourceMeta(GObjectMeta):
def __init__(cls, name, bases, dct):
super(ItemResourceMeta, cls).__init__(name, bases, dct)
cls.set_table("items")
cls.set_primary_key("id")
cls.set_notnull("name")
class ItemResource(Gom.Resource, metaclass=ItemResourceMeta):
id = GObject.Property(type=int)
name = GObject.Property(type=str)
if __name__ == '__main__':
# Connect to the database
adapter = Gom.Adapter()
adapter.open_sync(":memory:")
# Create the table
repository = Gom.Repository(adapter=adapter)
repository.automatic_migrate_sync(1, [ItemResource])
# Insert an item
item = ItemResource(repository=repository, name="item1")
item.save_sync()
# Fetch the item back
item = repository.find_one_sync(ItemResource, None)
assert item.id == 1
assert item.name == 'item1'
# Insert a new item
item = ItemResource(repository=repository, name="item2")
item.save_sync()
# Fetch them all with a None filter, ordered by name
names = ['item2', 'item1']
sorting = Gom.Sorting(ItemResource, "name", Gom.SortingMode.DESCENDING)
group = repository.find_sorted_sync(ItemResource, None, sorting)
count = len(group)
assert count == 2
group.fetch_sync(0, count)
for i, item in enumerate(group):
assert item.name == names[i]
# Fetch only one of them with a filter, asynchronously
loop = GLib.MainLoop()
def fetch_cb(group, result, user_data):
group.fetch_finish(result)
item = group[0]
assert item.name == "item2"
# Close the database
adapter.close_sync()
loop.quit()
def find_cb(repository, result, user_data):
group = repository.find_finish(result)
count = len(group)
assert count == 1
group.fetch_async(0, count, fetch_cb, None)
filter = Gom.Filter.new_eq(ItemResource, "name", "item2")
group = repository.find_async(ItemResource, filter, find_cb, None)
loop.run()
| lgpl-2.1 | -4,386,300,202,051,084,300 | 26.614458 | 75 | 0.654014 | false |
pignacio/var-log-recetas | var_log_recetas/ingredient/migrations/0002_initial_ingredients.py | 1 | 2713 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_measure_units(apps, schema_editor):
MeasureUnit = apps.get_model('ingredient', 'MeasureUnit')
db_alias = schema_editor.connection.alias
MeasureUnit.objects.using(db_alias).bulk_create([
MeasureUnit(name='units', short_name='u'),
MeasureUnit(name='grams', short_name='g'),
MeasureUnit(name='cups', short_name='u'),
MeasureUnit(name='mililiters', short_name='ml'),
MeasureUnit(name='cubic centimeters', short_name='cc'),
MeasureUnit(name='tablespoons', short_name='tbsp'),
MeasureUnit(name='teaspoons', short_name='tsp'),
])
def create_ingredients(apps, schema_editor):
INGREDIENTS = {
'Sugar': ('cups', 'tablespoons', 'teaspoons', 'grams'),
'Milk': ('cups', 'mililiters', 'cubic centimeters', 'tablespoons', 'teaspoons'),
'Egg': ('units', 'grams'),
'Egg yolk': ('units',),
'Egg white': ('units',),
'Cream': ('cups', 'mililiters', 'cubic centimeters', 'tablespoons', 'teaspoons'),
'Vanilla extract': ('mililiters', 'cubic centimeters', 'tablespoons', 'teaspoons'),
'Oat': ('cups', 'tablespoons', 'teaspoons', 'grams'),
'Flour': ('cups', 'tablespoons', 'teaspoons', 'grams'),
'Selfraising Flour': ('cups', 'tablespoons', 'teaspoons', 'grams'),
'Condensed milk': ('cups',
'cubic centimeters',
'tablespoons',
'teaspoons',
'grams'),
'Baking powder': ('tablespoons', 'teaspoons', 'grams'),
'Baking soda': ('tablespoons', 'teaspoons', 'grams'),
'Butter': ('cups', 'tablespoons', 'teaspoons', 'grams'),
'Water': ('cups', 'mililiters', 'cubic centimeters', 'tablespoons', 'teaspoons'),
'Salt': ('cups', 'tablespoons', 'teaspoons', 'grams'),
'Oil': ('cups', 'mililiters', 'cubic centimeters', 'tablespoons', 'teaspoons'),
}
MeasureUnit = apps.get_model('ingredient', 'MeasureUnit')
Ingredient = apps.get_model('ingredient', 'Ingredient')
for ingredient, units in INGREDIENTS.items():
ingredient, _created = Ingredient.objects.get_or_create(name=ingredient)
for unit in units:
ingredient.units.add(MeasureUnit.objects.get(name=unit))
db_alias = schema_editor.connection.alias
class Migration(migrations.Migration):
dependencies = [
('ingredient', '0001_initial'),
]
operations = [
migrations.operations.RunPython(create_measure_units),
migrations.operations.RunPython(create_ingredients),
]
| gpl-3.0 | 5,447,176,207,372,048,000 | 41.390625 | 91 | 0.597862 | false |
googleapis/googleapis-gen | google/cloud/notebooks/v1beta1/notebooks-v1beta1-py/docs/conf.py | 1 | 12491 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# google-cloud-notebooks documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
__version__ = "0.1.0"
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "1.6.3"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
]
# autodoc/autosummary flags
autoclass_content = "both"
autodoc_default_flags = ["members"]
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# Allow markdown includes (so releases.md can include CHANGLEOG.md)
# http://www.sphinx-doc.org/en/master/markdown.html
source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
source_suffix = [".rst", ".md"]
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = u"google-cloud-notebooks"
copyright = u"2020, Google, LLC"
author = u"Google APIs" # TODO: autogenerate this bit
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version.
version = ".".join(release.split(".")[0:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"description": "Google Cloud Client Libraries for Python",
"github_user": "googleapis",
"github_repo": "google-cloud-python",
"github_banner": True,
"font_family": "'Roboto', Georgia, sans",
"head_font_family": "'Roboto', Georgia, serif",
"code_font_family": "'Roboto Mono', 'Consolas', monospace",
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "google-cloud-notebooks-doc"
# -- Options for warnings ------------------------------------------------------
suppress_warnings = [
# Temporarily suppress this to avoid "more than one target found for
# cross-reference" warning, which are intractable for us to avoid while in
# a mono-repo.
# See https://github.com/sphinx-doc/sphinx/blob
# /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
"ref.python"
]
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
# Latex figure (float) alignment
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
"google-cloud-notebooks.tex",
u"google-cloud-notebooks Documentation",
author,
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
master_doc,
"google-cloud-notebooks",
u"Google Cloud Notebooks Documentation",
[author],
1,
)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"google-cloud-notebooks",
u"google-cloud-notebooks Documentation",
author,
"google-cloud-notebooks",
"GAPIC library for Google Cloud Notebooks API",
"APIs",
)
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
"python": ("http://python.readthedocs.org/en/latest/", None),
"gax": ("https://gax-python.readthedocs.org/en/latest/", None),
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
"google-gax": ("https://gax-python.readthedocs.io/en/latest/", None),
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None),
"grpc": ("https://grpc.io/grpc/python/", None),
"requests": ("http://requests.kennethreitz.org/en/stable/", None),
"proto": ("https://proto-plus-python.readthedocs.io/en/stable", None),
"protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
}
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
| apache-2.0 | 819,251,889,735,516,300 | 32.220745 | 87 | 0.69426 | false |
justinvforvendetta/electrum-boli | plugins/plot.py | 1 | 3669 | from PyQt4.QtGui import *
from electrum_boli.plugins import BasePlugin, hook
from electrum_boli.i18n import _
import datetime
from electrum_boli.util import format_satoshis
from electrum_boli.bitcoin import COIN
try:
import matplotlib.pyplot as plt
import matplotlib.dates as md
from matplotlib.patches import Ellipse
from matplotlib.offsetbox import AnchoredOffsetbox, TextArea, DrawingArea, HPacker
flag_matlib=True
except:
flag_matlib=False
class Plugin(BasePlugin):
def is_available(self):
        return flag_matlib
@hook
def init_qt(self, gui):
self.win = gui.main_window
@hook
def export_history_dialog(self, d,hbox):
self.wallet = d.wallet
history = self.wallet.get_history()
if len(history) > 0:
b = QPushButton(_("Preview plot"))
hbox.addWidget(b)
b.clicked.connect(lambda: self.do_plot(self.wallet, history))
else:
b = QPushButton(_("No history to plot"))
hbox.addWidget(b)
def do_plot(self, wallet, history):
balance_Val=[]
fee_val=[]
value_val=[]
datenums=[]
unknown_trans = 0
pending_trans = 0
counter_trans = 0
balance = 0
for item in history:
tx_hash, confirmations, value, timestamp, balance = item
if confirmations:
if timestamp is not None:
try:
datenums.append(md.date2num(datetime.datetime.fromtimestamp(timestamp)))
balance_Val.append(1000.*balance/COIN)
except [RuntimeError, TypeError, NameError] as reason:
unknown_trans += 1
pass
else:
unknown_trans += 1
else:
pending_trans += 1
value_val.append(1000.*value/COIN)
if tx_hash:
label, is_default_label = wallet.get_label(tx_hash)
label = label.encode('utf-8')
else:
label = ""
f, axarr = plt.subplots(2, sharex=True)
plt.subplots_adjust(bottom=0.2)
plt.xticks( rotation=25 )
ax=plt.gca()
x=19
test11="Unknown transactions = "+str(unknown_trans)+" Pending transactions = "+str(pending_trans)+" ."
box1 = TextArea(" Test : Number of pending transactions", textprops=dict(color="k"))
box1.set_text(test11)
box = HPacker(children=[box1],
align="center",
pad=0.1, sep=15)
anchored_box = AnchoredOffsetbox(loc=3,
child=box, pad=0.5,
frameon=True,
bbox_to_anchor=(0.5, 1.02),
bbox_transform=ax.transAxes,
borderpad=0.5,
)
ax.add_artist(anchored_box)
plt.ylabel('mBOLI')
plt.xlabel('Dates')
xfmt = md.DateFormatter('%Y-%m-%d')
ax.xaxis.set_major_formatter(xfmt)
axarr[0].plot(datenums,balance_Val,marker='o',linestyle='-',color='blue',label='Balance')
axarr[0].legend(loc='upper left')
axarr[0].set_title('History Transactions')
xfmt = md.DateFormatter('%Y-%m-%d')
ax.xaxis.set_major_formatter(xfmt)
axarr[1].plot(datenums,value_val,marker='o',linestyle='-',color='green',label='Value')
axarr[1].legend(loc='upper left')
# plt.annotate('unknown transaction = %d \n pending transactions = %d' %(unknown_trans,pending_trans),xy=(0.7,0.05),xycoords='axes fraction',size=12)
plt.show()
| gpl-3.0 | -4,475,255,244,508,078,600 | 28.119048 | 156 | 0.562551 | false |
silly-wacky-3-town-toon/SOURCE-COD | toontown/battle/DistributedBattleFinal.py | 1 | 7784 | from panda3d.core import *
from panda3d.direct import *
from direct.interval.IntervalGlobal import *
from BattleBase import *
from direct.actor import Actor
from toontown.distributed import DelayDelete
from direct.directnotify import DirectNotifyGlobal
import DistributedBattleBase
import MovieUtil
from toontown.suit import Suit
import SuitBattleGlobals
from toontown.toonbase import ToontownBattleGlobals
from toontown.toonbase import ToontownGlobals
from direct.fsm import State
import random
from otp.nametag.NametagConstants import *
from otp.nametag import NametagGlobals
class DistributedBattleFinal(DistributedBattleBase.DistributedBattleBase):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedBattleFinal')
def __init__(self, cr):
townBattle = cr.playGame.hood.loader.townBattle
DistributedBattleBase.DistributedBattleBase.__init__(self, cr, townBattle)
self.setupCollisions(self.uniqueBattleName('battle-collide'))
self.bossCog = None
self.bossCogRequest = None
self.streetBattle = 0
self.joiningSuitsName = self.uniqueBattleName('joiningSuits')
self.fsm.addState(State.State('ReservesJoining', self.enterReservesJoining, self.exitReservesJoining, ['WaitForJoin']))
offState = self.fsm.getStateNamed('Off')
offState.addTransition('ReservesJoining')
waitForJoinState = self.fsm.getStateNamed('WaitForJoin')
waitForJoinState.addTransition('ReservesJoining')
playMovieState = self.fsm.getStateNamed('PlayMovie')
playMovieState.addTransition('ReservesJoining')
return
def generate(self):
DistributedBattleBase.DistributedBattleBase.generate(self)
def disable(self):
DistributedBattleBase.DistributedBattleBase.disable(self)
base.cr.relatedObjectMgr.abortRequest(self.bossCogRequest)
self.bossCogRequest = None
self.bossCog = None
return
def delete(self):
DistributedBattleBase.DistributedBattleBase.delete(self)
self.removeCollisionData()
def setBossCogId(self, bossCogId):
self.bossCogId = bossCogId
if base.cr.doId2do.has_key(bossCogId):
tempBossCog = base.cr.doId2do[bossCogId]
self.__gotBossCog([tempBossCog])
else:
self.notify.debug('doing relatedObjectMgr.request for bossCog')
self.bossCogRequest = base.cr.relatedObjectMgr.requestObjects([bossCogId], allCallback=self.__gotBossCog)
def __gotBossCog(self, bossCogList):
self.bossCogRequest = None
self.bossCog = bossCogList[0]
currStateName = self.localToonFsm.getCurrentState().getName()
if currStateName == 'NoLocalToon' and self.bossCog.hasLocalToon():
self.enableCollision()
return
def setBattleNumber(self, battleNumber):
self.battleNumber = battleNumber
def setBattleSide(self, battleSide):
self.battleSide = battleSide
def setMembers(self, suits, suitsJoining, suitsPending, suitsActive, suitsLured, suitTraps, toons, toonsJoining, toonsPending, toonsActive, toonsRunning, timestamp):
if self.battleCleanedUp():
return
oldtoons = DistributedBattleBase.DistributedBattleBase.setMembers(self, suits, suitsJoining, suitsPending, suitsActive, suitsLured, suitTraps, toons, toonsJoining, toonsPending, toonsActive, toonsRunning, timestamp)
if len(self.toons) == 4 and len(oldtoons) < 4:
self.notify.debug('setMembers() - battle is now full of toons')
self.closeBattleCollision()
elif len(self.toons) < 4 and len(oldtoons) == 4:
self.openBattleCollision()
def makeSuitJoin(self, suit, ts):
self.notify.debug('makeSuitJoin(%d)' % suit.doId)
self.joiningSuits.append(suit)
if self.hasLocalToon():
self.d_joinDone(base.localAvatar.doId, suit.doId)
def showSuitsJoining(self, suits, ts, name, callback):
if self.bossCog == None:
return
if self.battleSide:
openDoor = Func(self.bossCog.doorB.request, 'open')
closeDoor = Func(self.bossCog.doorB.request, 'close')
else:
openDoor = Func(self.bossCog.doorA.request, 'open')
closeDoor = Func(self.bossCog.doorA.request, 'close')
suitTrack = Parallel()
delay = 0
for suit in suits:
suit.setState('Battle')
if suit.dna.dept == 'l':
suit.reparentTo(self.bossCog)
suit.setPos(0, 0, 0)
suit.setPos(self.bossCog, 0, 0, 0)
suit.headsUp(self)
suit.setScale(3.8 / suit.height)
if suit in self.joiningSuits:
i = len(self.pendingSuits) + self.joiningSuits.index(suit)
destPos, h = self.suitPendingPoints[i]
destHpr = VBase3(h, 0, 0)
else:
destPos, destHpr = self.getActorPosHpr(suit, self.suits)
suitTrack.append(Track((delay, self.createAdjustInterval(suit, destPos, destHpr)), (delay + 1.5, suit.scaleInterval(1.5, 1))))
delay += 1
if self.hasLocalToon():
camera.reparentTo(self)
if random.choice([0, 1]):
camera.setPosHpr(20, -4, 7, 60, 0, 0)
else:
camera.setPosHpr(-20, -4, 7, -60, 0, 0)
done = Func(callback)
track = Sequence(openDoor, suitTrack, closeDoor, done, name=name)
track.start(ts)
self.storeInterval(track, name)
return
def __playReward(self, ts, callback):
toonTracks = Parallel()
for toon in self.toons:
toonTracks.append(Sequence(Func(toon.loop, 'victory'), Wait(FLOOR_REWARD_TIMEOUT), Func(toon.loop, 'neutral')))
name = self.uniqueName('floorReward')
track = Sequence(toonTracks, name=name)
if self.hasLocalToon():
camera.setPos(0, 0, 1)
camera.setHpr(180, 10, 0)
track += [self.bossCog.makeEndOfBattleMovie(self.hasLocalToon()), Func(callback)]
self.storeInterval(track, name)
track.start(ts)
def enterReward(self, ts):
self.notify.debug('enterReward()')
self.disableCollision()
self.delayDeleteMembers()
self.__playReward(ts, self.__handleFloorRewardDone)
return None
def __handleFloorRewardDone(self):
return None
def exitReward(self):
self.notify.debug('exitReward()')
self.clearInterval(self.uniqueName('floorReward'), finish=1)
self._removeMembersKeep()
NametagGlobals.setMasterArrowsOn(1)
for toon in self.toons:
toon.startSmooth()
return None
def enterResume(self, ts = 0):
if self.hasLocalToon():
self.removeLocalToon()
self.fsm.requestFinalState()
def exitResume(self):
return None
def enterReservesJoining(self, ts = 0):
self.delayDeleteMembers()
self.showSuitsJoining(self.joiningSuits, ts, self.joiningSuitsName, self.__reservesJoiningDone)
def __reservesJoiningDone(self):
self._removeMembersKeep()
self.doneBarrier()
def exitReservesJoining(self):
self.clearInterval(self.joiningSuitsName)
def enterNoLocalToon(self):
self.notify.debug('enterNoLocalToon()')
        if self.bossCog is not None and self.bossCog.hasLocalToon():
self.enableCollision()
else:
self.disableCollision()
return
def exitNoLocalToon(self):
self.disableCollision()
return None
def enterWaitForServer(self):
self.notify.debug('enterWaitForServer()')
return None
def exitWaitForServer(self):
return None
| apache-2.0 | -5,185,984,711,244,480,000 | 37.534653 | 223 | 0.656475 | false |
bhaugen/nova | distribution/view_helpers.py | 1 | 28699 | from decimal import *
import datetime
from operator import attrgetter
from django.forms.formsets import formset_factory
from django.contrib.sites.models import Site
from models import *
from forms import *
try:
from notification import models as notification
except ImportError:
notification = None
def is_number(s):
try:
float(s)
return True
except ValueError:
return False
def create_pricing_masterboard_forms(delivery_date, data=None):
fn = food_network()
forms = []
pricing_objects = fn.producer_product_prices_for_date(delivery_date)
for pp in pricing_objects:
content_type = pp.__class__.__name__
prefix = "".join([content_type, str(pp.id)])
form = PricingMasterboardForm(
prefix = prefix,
data=data,
initial={
'id': pp.id,
'producer_id': pp.producer.id,
'product_id': pp.product.id,
'producer_price': pp.decide_producer_price(),
'producer_fee': pp.decide_producer_fee(),
'pay_price': pp.compute_pay_price(),
'markup_percent': pp.decide_markup(),
'selling_price': pp.compute_selling_price(),
'content_type': content_type,
}
)
form.product = pp.product.name_with_method()
form.producer = pp.producer
changes = ""
if isinstance(pp, ProducerPriceChange):
changes = pp.what_changed()
form.changes = changes
forms.append(form)
return forms
def create_producer_product_price_forms(product, data=None):
pps = product.product_producers.all()
form_list = []
for pp in pps:
prefix = "-".join(["PP", str(pp.id)])
form = ProducerProductPriceForm(data, prefix=prefix, instance=pp)
form.producer = pp.producer.short_name
form_list.append(form)
return form_list
def create_order_item_price_forms(product, data=None):
items = OrderItem.objects.filter(
product=product,
).exclude(order__state__contains="Paid").exclude(order__state="Unsubmitted")
form_list = []
for item in items:
prefix = "-".join(["OI", str(item.id)])
form = OrderItemPriceForm(data, prefix=prefix, instance=item)
form.order = item.order
form.producer = item.producer
form_list.append(form)
return form_list
def create_inventory_item_price_forms(product, data=None):
items = InventoryItem.objects.filter(
product=product,
remaining__gt=0,
)
form_list = []
for item in items:
prefix = "-".join(["II", str(item.id)])
form = InventoryItemPriceForm(data, prefix=prefix, instance=item)
form.lot = item.lot_id
form_list.append(form)
return form_list
def weekly_production_plans(week_date):
monday = week_date - datetime.timedelta(days=datetime.date.weekday(week_date))
saturday = monday + datetime.timedelta(days=5)
plans = ProductPlan.objects.select_related(depth=1).filter(
role="producer",
from_date__lte=week_date,
to_date__gte=saturday)
for plan in plans:
plan.category = plan.product.parent_string()
plan.product_name = plan.product.short_name
plans = sorted(plans, key=attrgetter('category',
'product_name'))
return plans
def plan_columns(from_date, to_date):
columns = []
wkdate = from_date
while wkdate <= to_date:
columns.append(wkdate.strftime('%Y-%m-%d'))
wkdate = wkdate + datetime.timedelta(days=7)
return columns
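# Illustrative example (hedged; dates chosen arbitrarily, not from the app):
# plan_columns(datetime.date(2014, 1, 6), datetime.date(2014, 1, 20))
# returns ['2014-01-06', '2014-01-13', '2014-01-20'] -- one column per week.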
def sd_columns(from_date, to_date):
columns = []
wkdate = from_date
while wkdate <= to_date:
columns.append(wkdate.strftime('%Y_%m_%d'))
wkdate = wkdate + datetime.timedelta(days=7)
return columns
# should plan_weeks go to the view and include headings?
# somebody needs headings!
def create_weekly_plan_forms(rows, data=None):
form_list = []
PlanCellFormSet = formset_factory(PlanCellForm, extra=0)
for row in rows:
product = row[0]
row_form = PlanRowForm(data, prefix=product.id, initial={'product_id': product.id})
row_form.product = product.long_name
cells = row[1:len(row)]
initial_data = []
for cell in cells:
plan_id = ""
if cell.plan:
plan_id = cell.plan.id
dict = {
'plan_id': plan_id,
'product_id': cell.product.id,
'from_date': cell.from_date,
'to_date': cell.to_date,
'quantity': cell.quantity,
}
initial_data.append(dict)
row_form.formset = PlanCellFormSet(data, prefix=product.id, initial=initial_data)
form_list.append(row_form)
return form_list
class SupplyDemandTable(object):
def __init__(self, columns, rows):
self.columns = columns
self.rows = rows
def supply_demand_table(from_date, to_date, member=None):
plans = ProductPlan.objects.all()
cps = ProducerProduct.objects.filter(
inventoried=False,
default_avail_qty__gt=0,
)
constants = {}
for cp in cps:
constants.setdefault(cp.product, Decimal("0"))
constants[cp.product] += cp.default_avail_qty
if member:
plans = plans.filter(member=member)
rows = {}
for plan in plans:
wkdate = from_date
product = plan.product.supply_demand_product()
constant = Decimal('0')
cp = constants.get(product)
if cp:
constant = cp
row = []
while wkdate <= to_date:
row.append(constant)
wkdate = wkdate + datetime.timedelta(days=7)
row.insert(0, product)
rows.setdefault(product, row)
wkdate = from_date
week = 0
while wkdate <= to_date:
if plan.from_date <= wkdate and plan.to_date >= wkdate:
if plan.role == "producer":
rows[product][week + 1] += plan.quantity
else:
rows[product][week + 1] -= plan.quantity
wkdate = wkdate + datetime.timedelta(days=7)
week += 1
label = "Product/Weeks"
columns = [label]
wkdate = from_date
while wkdate <= to_date:
columns.append(wkdate)
wkdate = wkdate + datetime.timedelta(days=7)
rows = rows.values()
rows.sort(lambda x, y: cmp(x[0].short_name, y[0].short_name))
sdtable = SupplyDemandTable(columns, rows)
return sdtable
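# Hedged illustration of the table returned above (shapes only, values invented):
# columns: ['Product/Weeks', date1, date2, ...]
# row:     [<Product>, Decimal('12'), Decimal('-3'), ...]
# where positive cells are net planned supply and negative cells net demand.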
def supply_demand_rows(from_date, to_date, member=None):
plans = ProductPlan.objects.select_related(depth=1).all()
cps = ProducerProduct.objects.filter(
inventoried=False,
default_avail_qty__gt=0,
)
constants = {}
rows = {}
#import pdb; pdb.set_trace()
#todo: what if some NIPs and some inventoried for same product?
    #does the code allow for that?
for cp in cps:
constants.setdefault(cp.product, Decimal("0"))
constant = cp.default_avail_qty
product = cp.product
constants[product] += constant
row = {}
row["product"] = product.long_name
row["id"] = product.id
rows.setdefault(product, row)
wkdate = from_date
while wkdate <= to_date:
row[wkdate.strftime('%Y_%m_%d')] = str(constant)
wkdate = wkdate + datetime.timedelta(days=7)
if member:
plans = plans.filter(member=member)
    #todo:
    # spread storage items over many weeks:
    # if plan.product.expiration_days > 1 week,
    # spread the remainder over weeks until consumed or expired.
    # This means plannable parents could determine expiration.
    # May require another pass through the storage plans...
for plan in plans:
wkdate = from_date
#this is too slow:
#product = plan.product.supply_demand_product()
product = plan.product
#constant = Decimal('0')
#constant = ""
#cp = constants.get(product)
#if cp:
# constant = str(cp)
row = {}
#while wkdate <= to_date:
# row[wkdate.strftime('%Y_%m_%d')] = str(constant)
# wkdate = wkdate + datetime.timedelta(days=7)
row["product"] = product.long_name
row["id"] = product.id
rows.setdefault(product, row)
#import pdb; pdb.set_trace()
wkdate = from_date
while wkdate <= to_date:
if plan.from_date <= wkdate and plan.to_date >= wkdate:
key = wkdate.strftime('%Y_%m_%d')
try:
value = rows[product][key]
except KeyError:
value = Decimal("0")
if value == "":
value = Decimal("0")
else:
value = Decimal(value)
if plan.role == "producer":
value += plan.quantity
else:
value -= plan.quantity
rows[product][key] = str(value)
wkdate = wkdate + datetime.timedelta(days=7)
rows = rows.values()
rows.sort(lambda x, y: cmp(x["product"], y["product"]))
return rows
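# Hedged illustration of one row dict produced above (names/values invented;
# note the quantities are stored as strings):
# {'product': 'Carrots, bunch', 'id': 42,
#  '2014_06_01': '12', '2014_06_08': '-3', ...}
# Positive values mean net supply for that week, negative values net demand.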
def supply_demand_weekly_table(week_date):
plans = ProductPlan.objects.filter(
from_date__lte=week_date,
to_date__gte=week_date,
).order_by("-role", "member__short_name")
columns = []
rows = {}
cps = ProducerProduct.objects.filter(
inventoried=False,
default_avail_qty__gt=0,
)
for cp in cps:
if not cp.producer in columns:
columns.append(cp.producer)
for plan in plans:
if not plan.member in columns:
columns.append(plan.member)
columns.insert(0, "Product\Member")
columns.append("Balance")
for cp in cps:
if not rows.get(cp.product):
row = []
for i in range(0, len(columns)-1):
row.append(Decimal("0"))
row.insert(0, cp.product)
rows[cp.product] = row
rows[cp.product][columns.index(cp.producer)] += cp.default_avail_qty
rows[cp.product][len(columns)-1] += cp.default_avail_qty
for plan in plans:
if not rows.get(plan.product):
row = []
for i in range(0, len(columns)-1):
row.append(Decimal("0"))
row.insert(0, plan.product)
rows[plan.product] = row
if plan.role == "producer":
rows[plan.product][columns.index(plan.member)] += plan.quantity
rows[plan.product][len(columns)-1] += plan.quantity
else:
rows[plan.product][columns.index(plan.member)] -= plan.quantity
rows[plan.product][len(columns)-1] -= plan.quantity
rows = rows.values()
rows.sort(lambda x, y: cmp(x[0].short_name, y[0].short_name))
sdtable = SupplyDemandTable(columns, rows)
return sdtable
def dojo_supply_demand_weekly_table(week_date):
plans = ProductPlan.objects.filter(
from_date__lte=week_date,
to_date__gte=week_date,
).order_by("-role", "member__short_name")
# for columns: product, member.short_name(s), balance
# but only members are needed here...product and balance can be added in
# template
# for rows: dictionaries with the above keys
columns = []
rows = {}
cps = ProducerProduct.objects.filter(
inventoried=False,
default_avail_qty__gt=0,
)
for cp in cps:
if not cp.producer in columns:
columns.append(cp.producer.short_name)
for plan in plans:
if not plan.member.short_name in columns:
columns.append(plan.member.short_name)
columns.append("Balance")
for cp in cps:
if not rows.get(cp.product):
row = {}
for column in columns:
row[column] = 0
row["product"] = cp.product.long_name
row["id"] = cp.product.id
row["Balance"] = 0
rows[cp.product] = row
rows[cp.product][cp.producer.short_name] += int(cp.default_avail_qty)
rows[cp.product]["Balance"] += int(cp.default_avail_qty)
for plan in plans:
if not rows.get(plan.product):
row = {}
for column in columns:
row[column] = 0
row["product"] = plan.product.long_name
row["id"] = plan.product.id
row["Balance"] = 0
rows[plan.product] = row
if plan.role == "producer":
rows[plan.product][plan.member.short_name] += int(plan.quantity)
rows[plan.product]["Balance"] += int(plan.quantity)
else:
rows[plan.product][plan.member.short_name] -= int(plan.quantity)
rows[plan.product]["Balance"] -= int(plan.quantity)
rows = rows.values()
rows.sort(lambda x, y: cmp(x["product"], y["product"]))
sdtable = SupplyDemandTable(columns, rows)
return sdtable
class SuppliableDemandCell(object):
def __init__(self, supply, demand):
self.supply = supply
self.demand = demand
def suppliable(self):
answer = Decimal("0")
if self.supply and self.demand:
if self.supply > self.demand:
answer = self.demand
else:
answer = self.supply
return answer
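# Minimal sketch of the cell semantics (not part of the original module):
# SuppliableDemandCell(Decimal("10"), Decimal("4")).suppliable() -> Decimal("4")
# SuppliableDemandCell(Decimal("3"), Decimal("7")).suppliable() -> Decimal("3")
# i.e. the suppliable quantity is min(supply, demand), or zero if either is unset.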
def suppliable_demand(from_date, to_date, member=None):
#import pdb; pdb.set_trace()
plans = ProductPlan.objects.all()
if member:
plans = plans.filter(member=member)
rows = {}
for plan in plans:
wkdate = from_date
row = []
while wkdate <= to_date:
row.append(SuppliableDemandCell(Decimal("0"), Decimal("0")))
wkdate = wkdate + datetime.timedelta(days=7)
product = plan.product.supply_demand_product()
row.insert(0, product)
rows.setdefault(product, row)
wkdate = from_date
week = 0
while wkdate <= to_date:
if plan.from_date <= wkdate and plan.to_date >= wkdate:
if plan.role == "producer":
rows[product][week + 1].supply += plan.quantity
else:
rows[product][week + 1].demand += plan.quantity
wkdate = wkdate + datetime.timedelta(days=7)
week += 1
rows = rows.values()
cust_fee = customer_fee()/100
producer_fee = default_producer_fee()/100
for row in rows:
for x in range(1, len(row)):
sd = row[x].suppliable()
if sd >= 0:
income = sd * row[0].selling_price
row[x] = income
else:
row[x] = Decimal("0")
income_rows = []
for row in rows:
base = Decimal("0")
total = Decimal("0")
for x in range(1, len(row)):
cell = row[x]
base += cell
cell += cell * cust_fee
total += cell
row[x] = cell.quantize(Decimal('.1'), rounding=ROUND_UP)
if total:
net = base * cust_fee + (base * producer_fee)
net = net.quantize(Decimal('1.'), rounding=ROUND_UP)
total = total.quantize(Decimal('1.'), rounding=ROUND_UP)
row.append(total)
row.append(net)
income_rows.append(row)
label = "Item\Weeks"
columns = [label]
wkdate = from_date
while wkdate <= to_date:
columns.append(wkdate)
wkdate = wkdate + datetime.timedelta(days=7)
columns.append("Total")
columns.append("Net")
    income_rows.sort(lambda x, y: cmp(x[0].long_name, y[0].long_name))
sdtable = SupplyDemandTable(columns, income_rows)
return sdtable
#todo: does not use contants (NIPs)
#or correct logic for storage items
def json_income_rows(from_date, to_date, member=None):
#import pdb; pdb.set_trace()
plans = ProductPlan.objects.all()
if member:
plans = plans.filter(member=member)
rows = {}
for plan in plans:
wkdate = from_date
row = {}
while wkdate <= to_date:
row[wkdate.strftime('%Y_%m_%d')] = SuppliableDemandCell(Decimal("0"), Decimal("0"))
wkdate = wkdate + datetime.timedelta(days=7)
product = plan.product.supply_demand_product()
row["product"] = product.long_name
row["id"] = product.id
row["price"] = product.selling_price
rows.setdefault(product, row)
wkdate = from_date
while wkdate <= to_date:
key = wkdate.strftime('%Y_%m_%d')
if plan.from_date <= wkdate and plan.to_date >= wkdate:
if plan.role == "producer":
rows[product][key].supply += plan.quantity
else:
rows[product][key].demand += plan.quantity
wkdate = wkdate + datetime.timedelta(days=7)
rows = rows.values()
cust_fee = customer_fee()/100
producer_fee = default_producer_fee()/100
#import pdb; pdb.set_trace()
for row in rows:
wkdate = from_date
while wkdate <= to_date:
key = wkdate.strftime('%Y_%m_%d')
sd = row[key].suppliable()
if sd > 0:
income = sd * row["price"]
row[key] = income
else:
row[key] = Decimal("0")
wkdate = wkdate + datetime.timedelta(days=7)
income_rows = []
for row in rows:
base = Decimal("0")
total = Decimal("0")
wkdate = from_date
while wkdate <= to_date:
key = wkdate.strftime('%Y_%m_%d')
cell = row[key]
base += cell
cell += cell * cust_fee
total += cell
row[key] = str(cell.quantize(Decimal('.1'), rounding=ROUND_UP))
wkdate = wkdate + datetime.timedelta(days=7)
if total:
net = base * cust_fee + (base * producer_fee)
net = net.quantize(Decimal('1.'), rounding=ROUND_UP)
total = total.quantize(Decimal('1.'), rounding=ROUND_UP)
row["total"] = str(total)
row["net"] = str(net)
row["price"] = str(row["price"])
income_rows.append(row)
income_rows.sort(lambda x, y: cmp(x["product"], y["product"]))
return income_rows
class PlannedWeek(object):
def __init__(self, product, from_date, to_date, quantity):
self.product = product
self.from_date = from_date
self.to_date = to_date
self.quantity = quantity
self.plan = None
def plan_weeks(member, products, from_date, to_date):
plans = ProductPlan.objects.filter(member=member)
#if member.is_customer():
# products = CustomerProduct.objects.filter(customer=member, planned=True)
#else:
# products = ProducerProduct.objects.filter(producer=member, planned=True)
#if not products:
# products = Product.objects.filter(plannable=True)
rows = {}
for pp in products:
try:
product = pp.product
except:
product = pp
wkdate = from_date
row = [product]
while wkdate <= to_date:
enddate = wkdate + datetime.timedelta(days=6)
row.append(PlannedWeek(product, wkdate, enddate, Decimal("0")))
wkdate = enddate + datetime.timedelta(days=1)
#row.insert(0, product)
rows.setdefault(product, row)
for plan in plans:
product = plan.product
wkdate = from_date
week = 0
while wkdate <= to_date:
enddate = wkdate + datetime.timedelta(days=6)
if plan.from_date <= wkdate and plan.to_date >= wkdate:
rows[product][week + 1].quantity = plan.quantity
rows[product][week + 1].plan = plan
wkdate = wkdate + datetime.timedelta(days=7)
week += 1
label = "Product/Weeks"
columns = [label]
wkdate = from_date
while wkdate <= to_date:
columns.append(wkdate)
wkdate = wkdate + datetime.timedelta(days=7)
rows = rows.values()
rows.sort(lambda x, y: cmp(x[0].short_name, y[0].short_name))
sdtable = SupplyDemandTable(columns, rows)
return sdtable
def plans_for_dojo(member, products, from_date, to_date):
#import pdb; pdb.set_trace()
plans = ProductPlan.objects.filter(member=member)
rows = {}
for pp in products:
yearly = 0
try:
product = pp.product
yearly = pp.qty_per_year
except:
product = pp
if not yearly:
try:
pp = ProducerProduct.objects.get(producer=member, product=product)
yearly = pp.qty_per_year
except:
pass
wkdate = from_date
row = {}
row["product"] = product.long_name
row["yearly"] = int(yearly)
row["id"] = product.id
row["member_id"] = member.id
row["from_date"] = from_date.strftime('%Y-%m-%d')
row["to_date"] = to_date.strftime('%Y-%m-%d')
while wkdate <= to_date:
enddate = wkdate + datetime.timedelta(days=6)
row[wkdate.strftime('%Y-%m-%d')] = "0"
wkdate = enddate + datetime.timedelta(days=1)
rows.setdefault(product, row)
#import pdb; pdb.set_trace()
for plan in plans:
product = plan.product
wkdate = from_date
week = 0
while wkdate <= to_date:
enddate = wkdate + datetime.timedelta(days=6)
if plan.from_date <= wkdate and plan.to_date >= wkdate:
rows[product][wkdate.strftime('%Y-%m-%d')] = str(plan.quantity)
rows[product][":".join([wkdate.strftime('%Y-%m-%d'), "plan_id"])] = plan.id
wkdate = wkdate + datetime.timedelta(days=7)
week += 1
rows = rows.values()
rows.sort(lambda x, y: cmp(x["product"], y["product"]))
return rows
def create_all_inventory_item_forms(avail_date, plans, items, data=None):
item_dict = {}
for item in items:
# This means one lot per producer per product per week
item_dict["-".join([str(item.product.id), str(item.producer.id)])] = item
form_list = []
for plan in plans:
#import pdb; pdb.set_trace()
custodian_id = ""
try:
member = plan.member
except:
member = plan.producer
try:
item = item_dict["-".join([str(plan.product.id),
str(member.id)])]
if item.custodian:
custodian_id = item.custodian.id
except KeyError:
item = False
try:
plan_qty = plan.quantity
except:
plan_qty = 0
#import pdb; pdb.set_trace()
if item:
pref = "-".join(["item", str(item.id)])
the_form = AllInventoryItemForm(data, prefix=pref, initial={
'item_id': item.id,
'product_id': item.product.id,
'producer_id': item.producer.id,
'freeform_lot_id': item.freeform_lot_id,
'field_id': item.field_id,
'custodian': custodian_id,
'inventory_date': item.inventory_date,
'expiration_date': item.expiration_date,
'planned': item.planned,
'received': item.received,
'notes': item.notes})
else:
pref = "-".join(["plan", str(plan.id)])
expiration_date = avail_date + datetime.timedelta(days=plan.product.expiration_days)
the_form = AllInventoryItemForm(data, prefix=pref, initial={
'item_id': 0,
'product_id': plan.product.id,
'producer_id': member.id,
'inventory_date': avail_date,
'expiration_date': expiration_date,
'planned': 0,
'received': 0,
'notes': ''})
the_form.description = plan.product.long_name
the_form.producer = member.short_name
the_form.plan_qty = plan_qty
form_list.append(the_form)
#import pdb; pdb.set_trace()
#form_list.sort(lambda x, y: cmp(x.producer, y.producer))
form_list = sorted(form_list, key=attrgetter('producer', 'description'))
return form_list
def create_delivery_cycle_selection_forms(data=None):
dcs = DeliveryCycle.objects.all()
form_list = []
for dc in dcs:
form = DeliveryCycleSelectionForm(data, prefix=dc.id)
form.cycle = dc
form.delivery_date = dc.next_delivery_date_using_closing()
form_list.append(form)
return form_list
def create_avail_item_forms(avail_date, data=None):
fn = food_network()
items = fn.avail_items_for_customer(avail_date)
form_list = []
for item in items:
pref = "-".join(["item", str(item.id)])
the_form = AvailableItemForm(data, prefix=pref, initial={
'item_id': item.id,
'inventory_date': item.inventory_date,
'expiration_date': item.expiration_date,
'quantity': item.avail_qty(),
})
the_form.description = item.product.name_with_method()
the_form.producer = item.producer.short_name
the_form.ordered = item.product.total_ordered_for_timespan(
item.inventory_date, item.expiration_date)
form_list.append(the_form)
form_list = sorted(form_list, key=attrgetter('description', 'producer'))
return form_list
def send_avail_emails(cycle):
fn = food_network()
food_network_name = fn.long_name
delivery_date = cycle.next_delivery_date_using_closing()
fresh_list = fn.email_availability(delivery_date)
users = []
for customer in cycle.customers.all():
users.append(customer)
for contact in customer.contacts.all():
if contact.email != customer.email:
users.append(contact)
oc = fn.order_contact()
if oc:
users.append(oc)
if fn.email != oc.email:
if fn.email:
users.append(fn)
users = list(set(users))
intro = avail_email_intro()
domain = Site.objects.get_current().domain
notification.send(users, "distribution_fresh_list", {
"intro": intro.message,
"domain": domain,
"fresh_list": fresh_list,
"delivery_date": delivery_date,
"food_network_name": food_network_name,
"cycle": cycle,
})
def create_order_item_forms_by_producer(order, delivery_date, data=None):
form_list = []
item_dict = {}
items = []
if order:
items = order.orderitem_set.all()
for item in items:
key = "-".join([str(item.product.id), str(item.producer.id)])
item_dict[key] = item
fn = food_network()
avail = fn.staff_availability_by_producer(delivery_date)
for prod in avail:
totavail = prod.avail
totordered = prod.ordered
producer = prod.producer
key = "-".join([str(prod.product.id), str(prod.producer.id)])
item = item_dict.get(key)
if item:
initial_data = {
'product_id': prod.product.id,
'producer_id': prod.producer.id,
'avail': totavail,
'unit_price': item.formatted_unit_price(),
'ordered': totordered,
}
prefix = "".join([str(item.product.id), str(item.producer.id)])
oiform = OrderItemForm(data, prefix=prefix, instance=item,
initial=initial_data)
oiform.producer = producer
oiform.description = prod.product.long_name
oiform.parents = prod.category
oiform.growing_method = prod.product.growing_method
form_list.append(oiform)
else:
#fee = prod.decide_fee()
prefix = "".join([str(prod.product.id), str(prod.producer.id)])
oiform = OrderItemForm(data, prefix=prefix, initial={
'product_id': prod.product.id,
'producer_id': prod.producer.id,
'avail': totavail,
'ordered': totordered,
'unit_price': prod.price,
#'fee': fee,
'quantity': 0})
oiform.description = prod.product.long_name
oiform.producer = producer
oiform.parents = prod.category
oiform.growing_method = prod.product.growing_method
form_list.append(oiform)
return form_list
| mit | -3,354,612,000,780,010,000 | 35.327848 | 96 | 0.559671 | false |
Ernti/GG | gg/GGwindowbutton.py | 1 | 2655 | '''
Created on 1 Mar 2014
@author: tore
'''
from OpenGL.GL import *
import pygame
class WindowButton(object):
def __init__(self, window, text, x, y, w, h, action):
self.window = window
self.text = text
self.posx = x
self.posy = y
self.width = w
self.height = h
self.actiondict = {'action': action}
def render(self):
mousepos = pygame.mouse.get_pos()
glBegin(GL_QUADS)
if ((mousepos[0] < self.window.posx + self.posx + self.width
and mousepos[0] > self.window.posx + self.posx)
and (mousepos[1] > self.window.posy + self.posy
and mousepos[1] < self.window.posy + self.posy + self.height)):
glColor3f(0.7, 0.7, 0.7)
else:
glColor3f(0.5, 0.5, 0.5)
glVertex2f(self.window.posx + self.posx,
self.window.ggci.ggdata.screenheight - self.window.posy - self.posy)
glVertex2f(self.window.posx + self.posx + self.width,
self.window.ggci.ggdata.screenheight - self.window.posy - self.posy)
glVertex2f(self.window.posx + self.posx + self.width,
self.window.ggci.ggdata.screenheight - self.window.posy - self.posy - self.height)
glVertex2f(self.window.posx + self.posx,
self.window.ggci.ggdata.screenheight - self.window.posy - self.posy - self.height)
glEnd()
glBegin(GL_LINE_LOOP)
glColor3f(0.4, 0.4, 0.4)
glVertex2f(self.window.posx + self.posx,
self.window.ggci.ggdata.screenheight - self.window.posy - self.posy)
glVertex2f(self.window.posx + self.posx + self.width,
self.window.ggci.ggdata.screenheight - self.window.posy - self.posy)
glVertex2f(self.window.posx + self.posx + self.width,
self.window.ggci.ggdata.screenheight - self.window.posy - self.posy - self.height)
glVertex2f(self.window.posx + self.posx,
self.window.ggci.ggdata.screenheight - self.window.posy - self.posy - self.height)
glEnd()
self.window.ggci.textrender.print(self.text, self.window.ggci.textrender.statchar,
self.window.posx + self.posx + self.width / 2,
self.window.ggci.ggdata.screenheight - self.window.posy - self.posy
- ((self.height + self.window.ggci.textrender.statchar[49][2]) / 2),
"center")
def action(self):
self.window.ggci.buttonhandler.handle(self.actiondict) | gpl-2.0 | 5,082,579,712,952,476,000 | 40.5 | 110 | 0.571375 | false |
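# Hedged usage sketch (the window/ggci wiring is assumed, not defined in this file):
# btn = WindowButton(window, 'OK', 10, 10, 80, 24, action='close')
# btn.render()    # draws the quad, border, and centered label each frame
# btn.action()    # dispatches {'action': 'close'} to ggci.buttonhandler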
lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_08_01/models/virtual_network_gateway_connection_list_entity.py | 1 | 7733 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class VirtualNetworkGatewayConnectionListEntity(Resource):
"""A common class for general resource information.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: Resource tags.
:type tags: dict[str, str]
:param authorization_key: The authorizationKey.
:type authorization_key: str
:param virtual_network_gateway1: Required. The reference to virtual
network gateway resource.
:type virtual_network_gateway1:
~azure.mgmt.network.v2017_08_01.models.VirtualNetworkConnectionGatewayReference
:param virtual_network_gateway2: The reference to virtual network gateway
resource.
:type virtual_network_gateway2:
~azure.mgmt.network.v2017_08_01.models.VirtualNetworkConnectionGatewayReference
:param local_network_gateway2: The reference to local network gateway
resource.
:type local_network_gateway2:
~azure.mgmt.network.v2017_08_01.models.VirtualNetworkConnectionGatewayReference
:param connection_type: Required. Gateway connection type. Possible values
    are: 'Ipsec','Vnet2Vnet','ExpressRoute', and 'VPNClient'. Possible values
include: 'IPsec', 'Vnet2Vnet', 'ExpressRoute', 'VPNClient'
:type connection_type: str or
~azure.mgmt.network.v2017_08_01.models.VirtualNetworkGatewayConnectionType
:param routing_weight: The routing weight.
:type routing_weight: int
:param shared_key: The IPSec shared key.
:type shared_key: str
:ivar connection_status: Virtual network Gateway connection status.
Possible values are 'Unknown', 'Connecting', 'Connected' and
'NotConnected'. Possible values include: 'Unknown', 'Connecting',
'Connected', 'NotConnected'
:vartype connection_status: str or
~azure.mgmt.network.v2017_08_01.models.VirtualNetworkGatewayConnectionStatus
:ivar tunnel_connection_status: Collection of all tunnels' connection
health status.
:vartype tunnel_connection_status:
list[~azure.mgmt.network.v2017_08_01.models.TunnelConnectionHealth]
:ivar egress_bytes_transferred: The egress bytes transferred in this
connection.
:vartype egress_bytes_transferred: long
:ivar ingress_bytes_transferred: The ingress bytes transferred in this
connection.
:vartype ingress_bytes_transferred: long
:param peer: The reference to peerings resource.
:type peer: ~azure.mgmt.network.v2017_08_01.models.SubResource
:param enable_bgp: EnableBgp flag
:type enable_bgp: bool
:param use_policy_based_traffic_selectors: Enable policy-based traffic
selectors.
:type use_policy_based_traffic_selectors: bool
:param ipsec_policies: The IPSec Policies to be considered by this
connection.
:type ipsec_policies:
list[~azure.mgmt.network.v2017_08_01.models.IpsecPolicy]
:param resource_guid: The resource GUID property of the
VirtualNetworkGatewayConnection resource.
:type resource_guid: str
:ivar provisioning_state: The provisioning state of the
VirtualNetworkGatewayConnection resource. Possible values are: 'Updating',
'Deleting', and 'Failed'.
:vartype provisioning_state: str
:param etag: Gets a unique read-only string that changes whenever the
resource is updated.
:type etag: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'virtual_network_gateway1': {'required': True},
'connection_type': {'required': True},
'connection_status': {'readonly': True},
'tunnel_connection_status': {'readonly': True},
'egress_bytes_transferred': {'readonly': True},
'ingress_bytes_transferred': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'},
'virtual_network_gateway1': {'key': 'properties.virtualNetworkGateway1', 'type': 'VirtualNetworkConnectionGatewayReference'},
'virtual_network_gateway2': {'key': 'properties.virtualNetworkGateway2', 'type': 'VirtualNetworkConnectionGatewayReference'},
'local_network_gateway2': {'key': 'properties.localNetworkGateway2', 'type': 'VirtualNetworkConnectionGatewayReference'},
'connection_type': {'key': 'properties.connectionType', 'type': 'str'},
'routing_weight': {'key': 'properties.routingWeight', 'type': 'int'},
'shared_key': {'key': 'properties.sharedKey', 'type': 'str'},
'connection_status': {'key': 'properties.connectionStatus', 'type': 'str'},
'tunnel_connection_status': {'key': 'properties.tunnelConnectionStatus', 'type': '[TunnelConnectionHealth]'},
'egress_bytes_transferred': {'key': 'properties.egressBytesTransferred', 'type': 'long'},
'ingress_bytes_transferred': {'key': 'properties.ingressBytesTransferred', 'type': 'long'},
'peer': {'key': 'properties.peer', 'type': 'SubResource'},
'enable_bgp': {'key': 'properties.enableBgp', 'type': 'bool'},
'use_policy_based_traffic_selectors': {'key': 'properties.usePolicyBasedTrafficSelectors', 'type': 'bool'},
'ipsec_policies': {'key': 'properties.ipsecPolicies', 'type': '[IpsecPolicy]'},
'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, **kwargs):
super(VirtualNetworkGatewayConnectionListEntity, self).__init__(**kwargs)
self.authorization_key = kwargs.get('authorization_key', None)
self.virtual_network_gateway1 = kwargs.get('virtual_network_gateway1', None)
self.virtual_network_gateway2 = kwargs.get('virtual_network_gateway2', None)
self.local_network_gateway2 = kwargs.get('local_network_gateway2', None)
self.connection_type = kwargs.get('connection_type', None)
self.routing_weight = kwargs.get('routing_weight', None)
self.shared_key = kwargs.get('shared_key', None)
self.connection_status = None
self.tunnel_connection_status = None
self.egress_bytes_transferred = None
self.ingress_bytes_transferred = None
self.peer = kwargs.get('peer', None)
self.enable_bgp = kwargs.get('enable_bgp', None)
self.use_policy_based_traffic_selectors = kwargs.get('use_policy_based_traffic_selectors', None)
self.ipsec_policies = kwargs.get('ipsec_policies', None)
self.resource_guid = kwargs.get('resource_guid', None)
self.provisioning_state = None
self.etag = kwargs.get('etag', None)
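# Hedged usage sketch (values are illustrative, not taken from the SDK docs):
# entity = VirtualNetworkGatewayConnectionListEntity(
#     location='westus',
#     connection_type='Vnet2Vnet',               # required per _validation
#     virtual_network_gateway1=gateway_ref,      # hypothetical reference object
# )
# Read-only fields such as entity.connection_status start as None and are
# populated by the server, never by the caller.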
| mit | -2,750,844,050,204,453,400 | 49.875 | 133 | 0.669856 | false |
urubens/CellCounting | src/data/cytomine_identifiers.py | 1 | 5650 | # -*- coding: utf-8 -*-
__author__ = "Ulysse Rubens <urubens@student.ulg.ac.be>"
__version__ = "0.1"
"""
Dictionary utilities encapsuling the Cytomine identifiers.
"""
CYTOMINE_KEYS = {
'demo.cytomine.be': {'public_key': '678d80ce-1d8f-4c33-857e-ae45605dec66',
'private_key': '90651c95-476c-40f0-9db3-00d4cdeef6b2'},
'beta.cytomine.be': {'public_key': '2a44e714-4f51-4cb0-ab28-8df645dee194',
'private_key': '46f0629b-ee5b-4cd6-950d-62e641da75d6'}
}
USERS = {
'demo.cytomine.be': {'urubens': 15386680, },
'beta.cytomine.be': {'urubens': 203661232, }
}
TERMS = {
'Nucleus': 15621701,
'Zone': 21124276,
'Immune cells': 8834946,
'Microdissection areas': 8834926,
'Subzone': 80703418,
'Spot dark brown': 19729442,
'NucleusAnapath': 90300636,
'ZoneAnapath': 90300630,
'SubArea': 23313208,
}
DATASETS = {
'BMGRAZ': {'id': 15386712,
'host': 'demo.cytomine.be',
'cell_term': TERMS['Nucleus'],
'roi_term': TERMS['Zone'],
'reviewed_only': False,
'image_as_roi': True,
'labels': 'image',
'users': None,
'local_path': 'GRAZ-2015/',
'mean_radius': 16,
'cv_epsilon': 8,
'post_min_dist': 8,
},
'CRC': {'id': 16016894,
'host': 'demo.cytomine.be',
'cell_term': TERMS['Nucleus'],
'roi_term': TERMS['Zone'],
'reviewed_only': False,
'image_as_roi': True,
'labels': 'image',
'users': None,
'local_path': 'CRC-2016/',
'mean_radius': 5,
'cv_epsilon': 4,
'post_min_dist': 8, },
'FLUO': {'id': 21617818,
'host': 'demo.cytomine.be',
'cell_term': TERMS['Nucleus'],
'roi_term': TERMS['Zone'],
'reviewed_only': False,
'image_as_roi': True,
'labels': 'image',
'users': None,
'mean_radius': 1,
'cv_epsilon': 8,
'post_min_dist': 8,
'local_path': '/cellcounting/data/FLUO-2010/', },
'ISBI': {'id': 0,
'host': 'demo.cytomine.be',
'cell_term': TERMS['Nucleus'],
'roi_term': TERMS['Zone'],
'reviewed_only': False,
'image_as_roi': True,
'labels': 'image',
'users': None,
'local_path': 'ISBI-2017/', },
'TEST': {'id': 20070949,
'host': 'demo.cytomine.be',
'cell_term': TERMS['Nucleus'],
'roi_term': TERMS['Zone'],
'reviewed_only': False,
'image_as_roi': True,
'labels': 'image',
'users': None,
'local_path': '', },
'GANGLIONS': {'id': 8816505,
'host': 'demo.cytomine.be',
'cell_term': TERMS['Immune cells'],
'roi_term': TERMS['SubArea'],
'reviewed_only': False,
'image_as_roi': False,
'labels': 'image',
'users': None,
'local_path': 'GANGLIONS/',
'mean_radius': 2,
'cv_epsilon': 10,
'post_min_dist': 6,},
'BRCA': {'id': 18792714,
'host': 'beta.cytomine.be',
'cell_term': TERMS['Spot dark brown'],
'roi_term': TERMS['Subzone'],
'reviewed_only': True,
'image_as_roi': False,
'labels': 'image',
'users': None,
'local_path': 'BRCA/', },
'ANAPATH': {'id': 83151073,
'host': 'beta.cytomine.be',
'cell_term': TERMS['NucleusAnapath'],
'roi_term': TERMS['ZoneAnapath'],
'reviewed_only': False,
'image_as_roi': False,
'labels': 'roi',
'users': [USERS['beta.cytomine.be']['urubens'], ],
'local_path': 'ANAPATH/',
'mean_radius': 2,
'cv_epsilon': 8,
'post_min_dist': 8, }
}
EXCLUDED = {
'BMGRAZ': [],
'CRC': [],
'FLUO': [],
'ISBI': [],
'TEST': [],
'GANGLIONS': [],
'BRCA': [ # ROI ids
87998887,
87977969,
87977970,
87977971,
87977972,
87977973,
87977974,
87977975,
],
'ANAPATH': [],
}
TEST_SET = {
'BMGRAZ': [ # images ids
15387485,
15387461,
15387437],
'CRC': [],
# [ # images ids
# 16021007,
# 16020983,
# 16020954,
# 16020898,
# 16020858,
# 16020807,
# 16020767,
# 16020738,
# 16020716,
# 16020689,
# 16020603,
# 16020563,
# 16020518,
# 16020512,
# 16020433,
# 16020427,
# 16020421,
# 16020323,
# 16020283,
# 16020241,
# 16020201,
# 16020172,
# 16020119,
# 16020079,
# 16020045,
# 16020039,
# 16019959,
# 16019919,
# 16019879,
# 16019839],
'FLUO': [],
'ISBI': [],
'TEST': [],
'GANGLIONS': [ # image ids
8816596,
8816602,
8816608],
'BRCA': [ # image ids
20161126,
20162101,
20162839,
20163194,
20168253,
20168305,
20168399,
20170362,
20170597],
'ANAPATH': [ # ROI ids
207008882,
207008933
]
}
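# Minimal helper sketch (assumes the dictionaries above; not in the original file):
def get_cell_term(dataset_name):
    """Return the Cytomine term id used to mark cells in the named dataset."""
    return DATASETS[dataset_name]['cell_term']
# e.g. get_cell_term('BMGRAZ') == TERMS['Nucleus'] == 15621701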
| mit | -5,817,311,607,417,019,000 | 26.696078 | 80 | 0.433628 | false |
line/line-bot-sdk-python | tests/models/test_base.py | 1 | 2959 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals, absolute_import
import json
import unittest
from linebot.models import Base
class Hoge(Base):
def __init__(self, title=None, content=None, hoge_bar=None, **kwargs):
super(Hoge, self).__init__(**kwargs)
self.title = title
self.content = content
self.hoge_bar = hoge_bar
class TestBase(unittest.TestCase):
def test_as_json_string(self):
self.assertEqual(
Hoge().as_json_string(),
'{}')
self.assertEqual(
Hoge(title='title').as_json_string(),
'{"title": "title"}')
self.assertEqual(
Hoge(title='title', content='content').as_json_string(),
'{"content": "content", "title": "title"}')
self.assertEqual(
Hoge(title='title', content={"hoge": "hoge"}).as_json_string(),
'{"content": {"hoge": "hoge"}, "title": "title"}')
self.assertEqual(
Hoge(title=[1, 2]).as_json_string(),
'{"title": [1, 2]}')
self.assertEqual(
Hoge(hoge_bar='hoge_bar').as_json_string(),
'{"hogeBar": "hoge_bar"}')
def test_as_json_dict(self):
self.assertEqual(
Hoge().as_json_dict(),
{})
self.assertEqual(
Hoge(title='title').as_json_dict(),
{'title': 'title'})
self.assertEqual(
Hoge(title='title', content='content').as_json_dict(),
{'content': 'content', 'title': 'title'})
self.assertEqual(
Hoge(title='title', content={"hoge": "hoge"}).as_json_dict(),
{'content': {'hoge': 'hoge'}, 'title': 'title'})
self.assertEqual(
Hoge(title=[1, 2]).as_json_dict(),
{'title': [1, 2]})
def test_new_from_json_dict(self):
self.assertEqual(
Hoge.new_from_json_dict({"title": "title"}),
Hoge(title='title'))
self.assertEqual(
Hoge.new_from_json_dict(json.loads('{"title": "title"}')),
Hoge(title='title'))
self.assertEqual(
Hoge.new_from_json_dict({"hoge_bar": "hoge_bar"}),
Hoge(hoge_bar='hoge_bar'))
self.assertEqual(
Hoge.new_from_json_dict({"hogeBar": "hoge_bar"}),
Hoge(hoge_bar='hoge_bar'))
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 4,919,719,617,394,605,000 | 33.406977 | 76 | 0.560324 | false |
landscapeio/pylint-common | setup.py | 1 | 1633 | # -*- coding: UTF-8 -*-
import sys
from setuptools import find_packages, setup
_version = '0.2.5'
_packages = find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"])
_short_description = ("pylint-common is a Pylint plugin to improve Pylint "
"error analysis of the standard Python library")
_classifiers = (
'Development Status :: 6 - Mature',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: Unix',
'Topic :: Software Development :: Quality Assurance',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
)
if sys.version_info < (2, 7):
# pylint 1.4 dropped support for Python 2.6
_install_requires = [
'pylint>=1.0,<1.4',
'astroid>=1.0,<1.3.0',
'logilab-common>=0.60.0,<0.63',
'pylint-plugin-utils>=0.2.6',
]
else:
_install_requires = [
'pylint>=1.0',
'pylint-plugin-utils>=0.2.6',
]
setup(
name='pylint-common',
url='https://github.com/landscapeio/pylint-common',
author='landscape.io',
author_email='code@landscape.io',
description=_short_description,
version=_version,
packages=_packages,
install_requires=_install_requires,
license='GPLv2',
classifiers=_classifiers,
keywords='pylint stdlib plugin',
zip_safe=False # see https://github.com/landscapeio/prospector/issues/18#issuecomment-49857277
)
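# Hedged usage note (the importable module name is assumed, not stated here):
# after `pip install pylint-common`, the checker improvements load with
# `pylint --load-plugins=pylint_common <target>`.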
| gpl-2.0 | -4,498,307,940,988,987,400 | 28.690909 | 99 | 0.620943 | false |
google-research/google-research | neural_guided_symbolic_regression/utils/generate_empirical_distribution_df_test.py | 1 | 6620 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for generate_empirical_distribution_df."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
import pandas as pd
import tensorflow.compat.v1 as tf
from neural_guided_symbolic_regression.models import grammar_utils
from neural_guided_symbolic_regression.utils import generate_empirical_distribution_df
class GenerateEmpiricalDistributionDfHelperTest(parameterized.TestCase):
@parameterized.parameters([
([1, 4], 5),
([1, 4, 3], 5),
([1, 4, 3, 5, 2, 6], 3),
([1, 4, 3, 5, 2, 6, 8], 3)
])
def test_get_number_valid_next_step(
self,
prod_rules_sequence_indices,
expected):
grammar = grammar_utils.load_grammar(
grammar_path='third_party/google_research/google_research/'
'neural_guided_symbolic_regression/grammar/'
'univariate_one_constant_grammar.txt')
number_valid_next_step = (
generate_empirical_distribution_df.get_number_valid_next_step(
prod_rules_sequence_indices, grammar))
self.assertEqual(number_valid_next_step, expected)
class GenerateEmpiricalDistributionDfMainTest(parameterized.TestCase):
def setUp(self):
super(GenerateEmpiricalDistributionDfMainTest, self).setUp()
# Production rule sequence of ( 1 ) is 1,6,7,6,9.
# Production rule sequence of ( x ) is 1,6,7,6,8.
self.expression_df = pd.DataFrame(
{'expression_string': ['( 1 )', '( x )'],
'leading_at_0': [0, 1],
'leading_at_inf': [0, 1]})
self.grammar = grammar_utils.load_grammar(
grammar_path='third_party/google_research/google_research/'
'neural_guided_symbolic_regression/grammar/'
'univariate_one_constant_grammar.txt')
self.max_length = 11
def test_get_partial_sequence_df(self):
partial_sequence_df = (
generate_empirical_distribution_df.get_partial_sequence_df(
self.expression_df, self.grammar, self.max_length))
expected_partial_sequence_indices = ['1', '1_6', '1_6_7', '1_6_7_6',
'1', '1_6', '1_6_7', '1_6_7_6']
self.assertListEqual(
list(partial_sequence_df['partial_sequence_indices'].values),
expected_partial_sequence_indices)
@parameterized.parameters([
(None,
'partial_sequence_indices',
['1', 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0]),
(None,
'partial_sequence_indices',
['1_6', 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0]),
(None,
'partial_sequence_indices',
['1_6_7', 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0]),
(None,
'partial_sequence_indices',
['1_6_7_6', 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 1]),
(2,
'tail_partial_sequence_indices',
['1', 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0]),
(2,
'tail_partial_sequence_indices',
['1_6', 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0]),
(2,
'tail_partial_sequence_indices',
['6_7', 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0]),
(2,
'tail_partial_sequence_indices',
['7_6', 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 1]),
])
def test_get_empirical_distribution_df(self,
tail_length,
level_name,
multi_index_to_check,
expected_probabilities):
properties = ['leading_at_0', 'leading_at_inf']
num_production_rules = len(self.grammar.prod_rules)
partial_sequence_df = (
generate_empirical_distribution_df.get_partial_sequence_df(
self.expression_df, self.grammar, self.max_length))
empirical_distribution_df = (
generate_empirical_distribution_df.get_empirical_distribution_df(
partial_sequence_df, properties, num_production_rules, tail_length))
levels = [level_name] + properties
np.testing.assert_array_almost_equal(
empirical_distribution_df.xs(multi_index_to_check,
level=levels).values[0],
expected_probabilities)
def test_get_empirical_distribution_df_without_condition(self):
num_production_rules = len(self.grammar.prod_rules)
partial_sequence_df = (
generate_empirical_distribution_df.get_partial_sequence_df(
self.expression_df, self.grammar, self.max_length))
empirical_distribution_df = (
generate_empirical_distribution_df.get_empirical_distribution_df(
partial_sequence_df, [], num_production_rules, None))
expected = pd.DataFrame(
np.array([[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0.5]]),
columns=range(10))
expected['partial_sequence_indices'] = ['1', '1_6', '1_6_7', '1_6_7_6']
expected.set_index('partial_sequence_indices', inplace=True)
pd.testing.assert_frame_equal(empirical_distribution_df,
expected,
check_dtype=False,
check_index_type=False,
check_column_type=False,
check_names=False)
@parameterized.parameters([
('1_6_7_6', 1, '6'),
('1_6_7_6', 2, '7_6'),
('1_6', 3, '1_6'),
])
def test_extract_tail_partial_sequence(self,
partial_sequence_string,
tail_length,
expected):
tail_partial_sequence_string = (
generate_empirical_distribution_df.extract_tail_partial_sequence(
partial_sequence_string, tail_length))
self.assertEqual(tail_partial_sequence_string, expected)
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | -8,822,368,325,674,619,000 | 37.71345 | 86 | 0.575378 | false |
gstsistemas/passwall | telas/collect.py | 1 | 5170 | # -*- coding: utf-8 -*-
from kivy.uix.screenmanager import Screen
#from kivy.lang import Builder
from models.senhas import Senha, Collection
from kivy.uix.button import Button
from kivy.uix.gridlayout import GridLayout
from telas.utilities import Confirma, JanelaSettings
import sys
class JanelaCollect (Screen):
def __init__(self, smanager=None, last_window=None, **kwargs):
super(JanelaCollect, self).__init__(**kwargs)
self.last_window = last_window
self.ids.area_collects.bind(minimum_height=self.ids.area_collects.setter('height'))
self.smanager = smanager
def recarrega (self):
self.ids.area_collects.clear_widgets()
cols = Collection.select()
for c in cols:
b = ItemColecao (c, smanager=self.smanager)
self.ids.area_collects.add_widget(b)
def on_pre_enter(self):
self.recarrega()
def on_leave(self):
self.smanager.remove_widget (self)
def call_settings (self):
from telas.collect import JanelaSettings
janela = JanelaSettings(smanager=self.smanager, name='janela_settings')
self.smanager.add_widget( janela )
#janela = self.smanager.get_screen('janela_add_collect')
self.smanager.transition.direction = 'left'
self.smanager.current = 'janela_settings'
def add (self):
from telas.collect import JanelaAddCollect
janela = JanelaAddCollect(smanager=self.smanager, name='janela_add_collect')
self.smanager.add_widget( janela )
#janela = self.smanager.get_screen('janela_add_collect')
self.smanager.transition.direction = 'left'
self.smanager.current = 'janela_add_collect'
def voltar (self):
sys.exit(0)
class ItemColecao (Button):
def __init__ (self, col, smanager=None, **kwargs):
super(ItemColecao, self).__init__(**kwargs)
self.collection = col
self.smanager = smanager
self.text = self.smanager.encrypter.decripta (col.nome)
def on_release (self, **kwargs):
super(ItemColecao, self).on_release(**kwargs)
from telas.passwd import JanelaPassList
janela = JanelaPassList( smanager=self.smanager, name='janela_pass_list')
self.smanager.add_widget( janela )
#janela = self.smanager.get_screen('janela_pass_list')
janela.setup (col=self.collection)
self.smanager.transition.direction = 'left'
self.smanager.current = 'janela_pass_list'
class JanelaAddCollect (Screen):
def __init__(self, smanager=None, last_window=None, **kwargs):
super(JanelaAddCollect, self).__init__(**kwargs)
self.last_window = last_window
self.smanager = smanager
def on_pre_enter(self):
self.ids.espaco_superior.remove_widget (self.ids.button_deleta)
self.ids.tx_nome.text = ''
def on_leave (self):
self.smanager.remove_widget(self)
def salvar (self):
c = Collection()
c.nome = self.smanager.encrypter.encripta (self.ids.tx_nome.text )
c.save()
        # Go to the view
#janela = self.smanager.get_screen('janela_pass_list')
from telas.passwd import JanelaPassList
janela = JanelaPassList( smanager=self.smanager, name='janela_pass_list')
self.smanager.add_widget( janela )
janela.setup (col=c)
self.smanager.transition.direction = 'right'
self.smanager.current = 'janela_pass_list'
def voltar (self):
from telas.collect import JanelaCollect
janela = JanelaCollect(smanager=self.smanager, name='janela_collect')
self.smanager.add_widget( janela )
janela.recarrega()
self.smanager.transition.direction = 'right'
self.smanager.current = 'janela_collect'
class JanelaEditCollect (JanelaAddCollect):
def setup (self, col):
self.collect = col
def on_pre_enter(self):
self.ids.tx_nome.text = self.smanager.encrypter.decripta (self.collect.nome)
def on_leave (self):
self.smanager.remove_widget(self)
def _really_delete(self, really):
if really:
self.collect.delete_instance(recursive=True)
self.voltar()
def delete (self):
p = Confirma (callback=self._really_delete, text='Remover Colecao?')
p.open()
def salvar (self):
c = self.collect
c.nome = self.smanager.encrypter.encripta (self.ids.tx_nome.text)
c.save()
        # Go to the view
#janela = self.smanager.get_screen('janela_pass_list')
from telas.passwd import JanelaPassList
janela = JanelaPassList( smanager=self.smanager, name='janela_pass_list')
self.smanager.add_widget( janela )
janela.setup (col=c)
self.smanager.transition.direction = 'right'
self.smanager.current = 'janela_pass_list'
#self.smanager.switch_to = 'janela_pass_list'
| gpl-2.0 | 1,587,664,183,274,672,400 | 33.019737 | 91 | 0.615861 | false |
RuiNascimento/krepo | script.module.lambdascrapers/lib/lambdascrapers/sources_placenta/en_placenta-1.7.8/cmovie.py | 1 | 3714 | # -*- coding: UTF-8 -*-
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @Daddy_Blamo wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# Addon Name: Placenta
# Addon id: plugin.video.placenta
# Addon Provider: Mr.Blamo
import re,base64,json,urlparse,urllib
from resources.lib.modules import client
from resources.lib.modules import cleantitle
from resources.lib.modules import directstream
from resources.lib.modules import dom_parser2
from resources.lib.modules import cfscrape
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['cutemovie.net']
self.base_link = 'http://www1.cutemovie.net/'
self.movies_search_path = ('search-movies/%s.html')
self.scraper = cfscrape.create_scraper()
def movie(self, imdb, title, localtitle, aliases, year):
try:
clean_title = cleantitle.geturl(title).replace('-','+')
url = urlparse.urljoin(self.base_link, (self.movies_search_path % clean_title))
r = client.request(url)
r = dom_parser2.parse_dom(r, 'div', {'id': 'movie-featured'})
r = [dom_parser2.parse_dom(i, 'a', req=['href']) for i in r if i]
            r = [(i[0].attrs['href'], re.search(r'Release:\s*(\d+)', i[0].content)) for i in r if i]
r = [(i[0], i[1].groups()[0]) for i in r if i[0] and i[1]]
r = [(i[0], i[1]) for i in r if i[1] == year]
if r[0]:
url = r[0][0]
return url
else: return
except Exception:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
r = client.request(url)
r = dom_parser2.parse_dom(r, 'p', {'class': 'server_play'})
r = [dom_parser2.parse_dom(i, 'a', req=['href']) for i in r if i]
            r = [(i[0].attrs['href'], re.search(r'/(\w+).html', i[0].attrs['href'])) for i in r if i]
r = [(i[0], i[1].groups()[0]) for i in r if i[0] and i[1]]
for i in r:
try:
host = i[1]
if str(host) in str(hostDict):
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({
'source': host,
'quality': 'SD',
'language': 'en',
                            'url': i[0].replace(r'\/', '/'),
'direct': False,
'debridonly': False
})
except: pass
return sources
except Exception:
return
def resolve(self, url):
try:
r = client.request(url)
url = re.findall('document.write.+?"([^"]*)', r)[0]
url = base64.b64decode(url)
url = re.findall('src="([^"]*)', url)[0]
return url
except Exception:
return
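# Illustrative flow (hedged; results depend on the live site and the host lists
# supplied by the caller):
# s = source()
# movie_url = s.movie('tt0111161', 'The Shawshank Redemption', None, [], '1994')
# links = s.sources(movie_url, hostDict, hostprDict)
# playable = s.resolve(links[0]['url']) if links else None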
| gpl-2.0 | 706,845,749,741,321,600 | 39.369565 | 100 | 0.462305 | false |
cjaymes/pyscap | src/scap/model/xal_2_0/PostBoxType.py | 1 | 1725 | # Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
from scap.Model import Model
import logging
logger = logging.getLogger(__name__)
class PostBoxType(Model):
MODEL_MAP = {
'tag_name': 'PostBox',
'elements': [
{'tag_name': 'AddressLine', 'list': 'address_lines', 'class': 'AddressLineType'},
{'tag_name': 'PostBoxNumber', 'in': 'post_box_number', 'class': 'PostBoxNumberType'},
{'tag_name': 'PostBoxNumberPrefix', 'in': 'post_box_number_prefix', 'class': 'PostBoxNumberPrefixType'},
{'tag_name': 'PostBoxNumberSuffix', 'in': 'post_box_number_suffix', 'class': 'PostBoxNumberSuffixType'},
{'tag_name': 'PostBoxNumberExtension', 'in': 'post_box_number_extension', 'class': 'PostBoxNumberExtensionType'},
{'tag_name': 'Firm', 'in': 'firm', 'class': 'FirmType'},
{'tag_name': 'PostalCode', 'in': 'postal_code', 'class': 'PostalCodeType'},
{'tag_name': '*'},
],
'attributes': {
'Type': {},
'Indicator': {},
'*': {},
}
}
| gpl-3.0 | 940,065,220,103,556,200 | 42.125 | 125 | 0.628406 | false |
Pal3love/otRebuilder | Package/otRebuilder/Dep/fontTools/ttLib/tables/TupleVariation.py | 1 | 21422 | from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
from fontTools.misc.fixedTools import fixedToFloat, floatToFixed
from fontTools.misc.textTools import safeEval
import array
import io
import logging
import struct
import sys
# https://www.microsoft.com/typography/otspec/otvarcommonformats.htm
EMBEDDED_PEAK_TUPLE = 0x8000
INTERMEDIATE_REGION = 0x4000
PRIVATE_POINT_NUMBERS = 0x2000
DELTAS_ARE_ZERO = 0x80
DELTAS_ARE_WORDS = 0x40
DELTA_RUN_COUNT_MASK = 0x3f
POINTS_ARE_WORDS = 0x80
POINT_RUN_COUNT_MASK = 0x7f
TUPLES_SHARE_POINT_NUMBERS = 0x8000
TUPLE_COUNT_MASK = 0x0fff
TUPLE_INDEX_MASK = 0x0fff
log = logging.getLogger(__name__)
class TupleVariation(object):
def __init__(self, axes, coordinates):
self.axes = axes.copy()
self.coordinates = coordinates[:]
def __repr__(self):
axes = ",".join(sorted(["%s=%s" % (name, value) for (name, value) in self.axes.items()]))
return "<TupleVariation %s %s>" % (axes, self.coordinates)
def __eq__(self, other):
return self.coordinates == other.coordinates and self.axes == other.axes
def getUsedPoints(self):
result = set()
for i, point in enumerate(self.coordinates):
if point is not None:
result.add(i)
return result
def hasImpact(self):
"""Returns True if this TupleVariation has any visible impact.
If the result is False, the TupleVariation can be omitted from the font
without making any visible difference.
"""
for c in self.coordinates:
if c is not None:
return True
return False
def toXML(self, writer, axisTags):
writer.begintag("tuple")
writer.newline()
for axis in axisTags:
value = self.axes.get(axis)
if value is not None:
minValue, value, maxValue = (float(v) for v in value)
defaultMinValue = min(value, 0.0) # -0.3 --> -0.3; 0.7 --> 0.0
defaultMaxValue = max(value, 0.0) # -0.3 --> 0.0; 0.7 --> 0.7
if minValue == defaultMinValue and maxValue == defaultMaxValue:
writer.simpletag("coord", axis=axis, value=value)
else:
writer.simpletag("coord", axis=axis, value=value, min=minValue, max=maxValue)
writer.newline()
wrote_any_deltas = False
for i, delta in enumerate(self.coordinates):
if type(delta) == tuple and len(delta) == 2:
writer.simpletag("delta", pt=i, x=delta[0], y=delta[1])
writer.newline()
wrote_any_deltas = True
elif type(delta) == int:
writer.simpletag("delta", cvt=i, value=delta)
writer.newline()
wrote_any_deltas = True
elif delta is not None:
log.error("bad delta format")
writer.comment("bad delta #%d" % i)
writer.newline()
wrote_any_deltas = True
if not wrote_any_deltas:
writer.comment("no deltas")
writer.newline()
writer.endtag("tuple")
writer.newline()
def fromXML(self, name, attrs, _content):
if name == "coord":
axis = attrs["axis"]
value = float(attrs["value"])
defaultMinValue = min(value, 0.0) # -0.3 --> -0.3; 0.7 --> 0.0
defaultMaxValue = max(value, 0.0) # -0.3 --> 0.0; 0.7 --> 0.7
minValue = float(attrs.get("min", defaultMinValue))
maxValue = float(attrs.get("max", defaultMaxValue))
self.axes[axis] = (minValue, value, maxValue)
elif name == "delta":
if "pt" in attrs:
point = safeEval(attrs["pt"])
x = safeEval(attrs["x"])
y = safeEval(attrs["y"])
self.coordinates[point] = (x, y)
elif "cvt" in attrs:
cvt = safeEval(attrs["cvt"])
value = safeEval(attrs["value"])
self.coordinates[cvt] = value
else:
log.warning("bad delta format: %s" %
", ".join(sorted(attrs.keys())))
def compile(self, axisTags, sharedCoordIndices, sharedPoints):
tupleData = []
assert all(tag in axisTags for tag in self.axes.keys()), ("Unknown axis tag found.", self.axes.keys(), axisTags)
coord = self.compileCoord(axisTags)
if coord in sharedCoordIndices:
flags = sharedCoordIndices[coord]
else:
flags = EMBEDDED_PEAK_TUPLE
tupleData.append(coord)
intermediateCoord = self.compileIntermediateCoord(axisTags)
if intermediateCoord is not None:
flags |= INTERMEDIATE_REGION
tupleData.append(intermediateCoord)
points = self.getUsedPoints()
if sharedPoints == points:
# Only use the shared points if they are identical to the actually used points
auxData = self.compileDeltas(sharedPoints)
usesSharedPoints = True
else:
flags |= PRIVATE_POINT_NUMBERS
numPointsInGlyph = len(self.coordinates)
auxData = self.compilePoints(points, numPointsInGlyph) + self.compileDeltas(points)
usesSharedPoints = False
tupleData = struct.pack('>HH', len(auxData), flags) + bytesjoin(tupleData)
return (tupleData, auxData, usesSharedPoints)
def compileCoord(self, axisTags):
result = []
for axis in axisTags:
_minValue, value, _maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
result.append(struct.pack(">h", floatToFixed(value, 14)))
return bytesjoin(result)
def compileIntermediateCoord(self, axisTags):
needed = False
for axis in axisTags:
minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
defaultMinValue = min(value, 0.0) # -0.3 --> -0.3; 0.7 --> 0.0
defaultMaxValue = max(value, 0.0) # -0.3 --> 0.0; 0.7 --> 0.7
if (minValue != defaultMinValue) or (maxValue != defaultMaxValue):
needed = True
break
if not needed:
return None
minCoords = []
maxCoords = []
for axis in axisTags:
minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
minCoords.append(struct.pack(">h", floatToFixed(minValue, 14)))
maxCoords.append(struct.pack(">h", floatToFixed(maxValue, 14)))
return bytesjoin(minCoords + maxCoords)
@staticmethod
def decompileCoord_(axisTags, data, offset):
coord = {}
pos = offset
for axis in axisTags:
coord[axis] = fixedToFloat(struct.unpack(">h", data[pos:pos+2])[0], 14)
pos += 2
return coord, pos
@staticmethod
def compilePoints(points, numPointsInGlyph):
# If the set consists of all points in the glyph, it gets encoded with
# a special encoding: a single zero byte.
if len(points) == numPointsInGlyph:
return b"\0"
# In the 'gvar' table, the packing of point numbers is a little surprising.
# It consists of multiple runs, each being a delta-encoded list of integers.
# For example, the point set {17, 18, 19, 20, 21, 22, 23} gets encoded as
# [6, 17, 1, 1, 1, 1, 1, 1]. The first value (6) is the run length minus 1.
# There are two types of runs, with values being either 8 or 16 bit unsigned
# integers.
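		# Illustrative expectation (an added example, not from the original
		# source): for the point set above, compilePoints({17, 18, 19, 20,
		# 21, 22, 23}, 100) encodes to
		# b'\x07\x06\x11\x01\x01\x01\x01\x01\x01' -- the point count (7),
		# one run header (6 = run length minus one), then the byte-sized
		# deltas 0x11 (17) followed by six 0x01 steps.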
points = list(points)
points.sort()
numPoints = len(points)
# The binary representation starts with the total number of points in the set,
# encoded into one or two bytes depending on the value.
if numPoints < 0x80:
result = [bytechr(numPoints)]
else:
result = [bytechr((numPoints >> 8) | 0x80) + bytechr(numPoints & 0xff)]
MAX_RUN_LENGTH = 127
pos = 0
lastValue = 0
while pos < numPoints:
run = io.BytesIO()
runLength = 0
useByteEncoding = None
while pos < numPoints and runLength <= MAX_RUN_LENGTH:
curValue = points[pos]
delta = curValue - lastValue
if useByteEncoding is None:
useByteEncoding = 0 <= delta <= 0xff
if useByteEncoding and (delta > 0xff or delta < 0):
# we need to start a new run (which will not use byte encoding)
break
# TODO This never switches back to a byte-encoding from a short-encoding.
# That's suboptimal.
if useByteEncoding:
run.write(bytechr(delta))
else:
run.write(bytechr(delta >> 8))
run.write(bytechr(delta & 0xff))
lastValue = curValue
pos += 1
runLength += 1
if useByteEncoding:
runHeader = bytechr(runLength - 1)
else:
runHeader = bytechr((runLength - 1) | POINTS_ARE_WORDS)
result.append(runHeader)
result.append(run.getvalue())
return bytesjoin(result)
@staticmethod
def decompilePoints_(numPoints, data, offset, tableTag):
"""(numPoints, data, offset, tableTag) --> ([point1, point2, ...], newOffset)"""
assert tableTag in ('cvar', 'gvar')
pos = offset
numPointsInData = byteord(data[pos])
pos += 1
if (numPointsInData & POINTS_ARE_WORDS) != 0:
numPointsInData = (numPointsInData & POINT_RUN_COUNT_MASK) << 8 | byteord(data[pos])
pos += 1
if numPointsInData == 0:
return (range(numPoints), pos)
result = []
while len(result) < numPointsInData:
runHeader = byteord(data[pos])
pos += 1
numPointsInRun = (runHeader & POINT_RUN_COUNT_MASK) + 1
point = 0
if (runHeader & POINTS_ARE_WORDS) != 0:
points = array.array("H")
pointsSize = numPointsInRun * 2
else:
points = array.array("B")
pointsSize = numPointsInRun
points.fromstring(data[pos:pos+pointsSize])
if sys.byteorder != "big":
points.byteswap()
assert len(points) == numPointsInRun
pos += pointsSize
result.extend(points)
# Convert relative to absolute
absolute = []
current = 0
for delta in result:
current += delta
absolute.append(current)
result = absolute
del absolute
badPoints = {str(p) for p in result if p < 0 or p >= numPoints}
if badPoints:
log.warning("point %s out of range in '%s' table" %
(",".join(sorted(badPoints)), tableTag))
return (result, pos)
def compileDeltas(self, points):
deltaX = []
deltaY = []
for p in sorted(list(points)):
c = self.coordinates[p]
if type(c) is tuple and len(c) == 2:
deltaX.append(c[0])
deltaY.append(c[1])
elif type(c) is int:
deltaX.append(c)
elif c is not None:
raise ValueError("invalid type of delta: %s" % type(c))
return self.compileDeltaValues_(deltaX) + self.compileDeltaValues_(deltaY)
@staticmethod
def compileDeltaValues_(deltas):
"""[value1, value2, value3, ...] --> bytestring
Emits a sequence of runs. Each run starts with a
byte-sized header whose 6 least significant bits
(header & 0x3F) indicate how many values are encoded
in this run. The stored length is the actual length
minus one; run lengths are thus in the range [1..64].
If the header byte has its most significant bit (0x80)
set, all values in this run are zero, and no data
follows. Otherwise, the header byte is followed by
((header & 0x3F) + 1) signed values. If (header &
0x40) is clear, the delta values are stored as signed
bytes; if (header & 0x40) is set, the delta values are
signed 16-bit integers.
""" # Explaining the format because the 'gvar' spec is hard to understand.
stream = io.BytesIO()
pos = 0
while pos < len(deltas):
value = deltas[pos]
if value == 0:
pos = TupleVariation.encodeDeltaRunAsZeroes_(deltas, pos, stream)
elif value >= -128 and value <= 127:
pos = TupleVariation.encodeDeltaRunAsBytes_(deltas, pos, stream)
else:
pos = TupleVariation.encodeDeltaRunAsWords_(deltas, pos, stream)
return stream.getvalue()
@staticmethod
def encodeDeltaRunAsZeroes_(deltas, offset, stream):
runLength = 0
pos = offset
numDeltas = len(deltas)
while pos < numDeltas and runLength < 64 and deltas[pos] == 0:
pos += 1
runLength += 1
assert runLength >= 1 and runLength <= 64
stream.write(bytechr(DELTAS_ARE_ZERO | (runLength - 1)))
return pos
@staticmethod
def encodeDeltaRunAsBytes_(deltas, offset, stream):
runLength = 0
pos = offset
numDeltas = len(deltas)
while pos < numDeltas and runLength < 64:
value = deltas[pos]
if value < -128 or value > 127:
break
# Within a byte-encoded run of deltas, a single zero
# is best stored literally as 0x00 value. However,
			# if there are two or more zeroes in a sequence, it is
# better to start a new run. For example, the sequence
# of deltas [15, 15, 0, 15, 15] becomes 6 bytes
# (04 0F 0F 00 0F 0F) when storing the zero value
# literally, but 7 bytes (01 0F 0F 80 01 0F 0F)
# when starting a new run.
if value == 0 and pos+1 < numDeltas and deltas[pos+1] == 0:
break
pos += 1
runLength += 1
assert runLength >= 1 and runLength <= 64
stream.write(bytechr(runLength - 1))
for i in range(offset, pos):
stream.write(struct.pack('b', round(deltas[i])))
return pos
@staticmethod
def encodeDeltaRunAsWords_(deltas, offset, stream):
runLength = 0
pos = offset
numDeltas = len(deltas)
while pos < numDeltas and runLength < 64:
value = deltas[pos]
# Within a word-encoded run of deltas, it is easiest
# to start a new run (with a different encoding)
# whenever we encounter a zero value. For example,
# the sequence [0x6666, 0, 0x7777] needs 7 bytes when
# storing the zero literally (42 66 66 00 00 77 77),
# and equally 7 bytes when starting a new run
# (40 66 66 80 40 77 77).
if value == 0:
break
# Within a word-encoded run of deltas, a single value
# in the range (-128..127) should be encoded literally
# because it is more compact. For example, the sequence
# [0x6666, 2, 0x7777] becomes 7 bytes when storing
# the value literally (42 66 66 00 02 77 77), but 8 bytes
# when starting a new run (40 66 66 00 02 40 77 77).
isByteEncodable = lambda value: value >= -128 and value <= 127
if isByteEncodable(value) and pos+1 < numDeltas and isByteEncodable(deltas[pos+1]):
break
pos += 1
runLength += 1
assert runLength >= 1 and runLength <= 64
stream.write(bytechr(DELTAS_ARE_WORDS | (runLength - 1)))
for i in range(offset, pos):
stream.write(struct.pack('>h', round(deltas[i])))
return pos
@staticmethod
def decompileDeltas_(numDeltas, data, offset):
"""(numDeltas, data, offset) --> ([delta, delta, ...], newOffset)"""
result = []
pos = offset
while len(result) < numDeltas:
runHeader = byteord(data[pos])
pos += 1
numDeltasInRun = (runHeader & DELTA_RUN_COUNT_MASK) + 1
if (runHeader & DELTAS_ARE_ZERO) != 0:
result.extend([0] * numDeltasInRun)
else:
if (runHeader & DELTAS_ARE_WORDS) != 0:
deltas = array.array("h")
deltasSize = numDeltasInRun * 2
else:
deltas = array.array("b")
deltasSize = numDeltasInRun
deltas.fromstring(data[pos:pos+deltasSize])
if sys.byteorder != "big":
deltas.byteswap()
assert len(deltas) == numDeltasInRun
pos += deltasSize
result.extend(deltas)
assert len(result) == numDeltas
return (result, pos)
@staticmethod
def getTupleSize_(flags, axisCount):
size = 4
if (flags & EMBEDDED_PEAK_TUPLE) != 0:
size += axisCount * 2
if (flags & INTERMEDIATE_REGION) != 0:
size += axisCount * 4
return size
def decompileSharedTuples(axisTags, sharedTupleCount, data, offset):
result = []
for _ in range(sharedTupleCount):
t, offset = TupleVariation.decompileCoord_(axisTags, data, offset)
result.append(t)
return result
def compileSharedTuples(axisTags, variations):
coordCount = {}
for var in variations:
coord = var.compileCoord(axisTags)
coordCount[coord] = coordCount.get(coord, 0) + 1
sharedCoords = [(count, coord)
for (coord, count) in coordCount.items() if count > 1]
sharedCoords.sort(reverse=True)
MAX_NUM_SHARED_COORDS = TUPLE_INDEX_MASK + 1
sharedCoords = sharedCoords[:MAX_NUM_SHARED_COORDS]
return [c[1] for c in sharedCoords] # Strip off counts.
def compileTupleVariationStore(variations, pointCount,
axisTags, sharedTupleIndices):
variations = [v for v in variations if v.hasImpact()]
if len(variations) == 0:
return (0, b"", b"")
# Each glyph variation tuples modifies a set of control points. To
# indicate which exact points are getting modified, a single tuple
# can either refer to a shared set of points, or the tuple can
# supply its private point numbers. Because the impact of sharing
# can be positive (no need for a private point list) or negative
# (need to supply 0,0 deltas for unused points), it is not obvious
# how to determine which tuples should take their points from the
# shared pool versus have their own. Perhaps we should resort to
# brute force, and try all combinations? However, if a glyph has n
# variation tuples, we would need to try 2^n combinations (because
# each tuple may or may not be part of the shared set). How many
# variations tuples do glyphs have?
#
# Skia.ttf: {3: 1, 5: 11, 6: 41, 7: 62, 8: 387, 13: 1, 14: 3}
# JamRegular.ttf: {3: 13, 4: 122, 5: 1, 7: 4, 8: 1, 9: 1, 10: 1}
# BuffaloGalRegular.ttf: {1: 16, 2: 13, 4: 2, 5: 4, 6: 19, 7: 1, 8: 3, 9: 8}
# (Reading example: In Skia.ttf, 41 glyphs have 6 variation tuples).
#
# Is this even worth optimizing? If we never use a shared point
# list, the private lists will consume 112K for Skia, 5K for
# BuffaloGalRegular, and 15K for JamRegular. If we always use a
# shared point list, the shared lists will consume 16K for Skia,
# 3K for BuffaloGalRegular, and 10K for JamRegular. However, in
# the latter case the delta arrays will become larger, but I
# haven't yet measured by how much. From gut feeling (which may be
# wrong), the optimum is to share some but not all points;
# however, then we would need to try all combinations.
#
# For the time being, we try two variants and then pick the better one:
# (a) each tuple supplies its own private set of points;
# (b) all tuples refer to a shared set of points, which consists of
# "every control point in the glyph that has explicit deltas".
usedPoints = set()
for v in variations:
usedPoints |= v.getUsedPoints()
tuples = []
data = []
someTuplesSharePoints = False
sharedPointVariation = None # To keep track of a variation that uses shared points
for v in variations:
privateTuple, privateData, _ = v.compile(
axisTags, sharedTupleIndices, sharedPoints=None)
sharedTuple, sharedData, usesSharedPoints = v.compile(
axisTags, sharedTupleIndices, sharedPoints=usedPoints)
if (len(sharedTuple) + len(sharedData)) < (len(privateTuple) + len(privateData)):
tuples.append(sharedTuple)
data.append(sharedData)
someTuplesSharePoints |= usesSharedPoints
sharedPointVariation = v
else:
tuples.append(privateTuple)
data.append(privateData)
if someTuplesSharePoints:
# Use the last of the variations that share points for compiling the packed point data
data = sharedPointVariation.compilePoints(usedPoints, len(sharedPointVariation.coordinates)) + bytesjoin(data)
tupleVariationCount = TUPLES_SHARE_POINT_NUMBERS | len(tuples)
else:
data = bytesjoin(data)
tupleVariationCount = len(tuples)
tuples = bytesjoin(tuples)
return tupleVariationCount, tuples, data
def decompileTupleVariationStore(tableTag, axisTags,
tupleVariationCount, pointCount, sharedTuples,
data, pos, dataPos):
numAxes = len(axisTags)
result = []
if (tupleVariationCount & TUPLES_SHARE_POINT_NUMBERS) != 0:
sharedPoints, dataPos = TupleVariation.decompilePoints_(
pointCount, data, dataPos, tableTag)
else:
sharedPoints = []
for _ in range(tupleVariationCount & TUPLE_COUNT_MASK):
dataSize, flags = struct.unpack(">HH", data[pos:pos+4])
tupleSize = TupleVariation.getTupleSize_(flags, numAxes)
tupleData = data[pos : pos + tupleSize]
pointDeltaData = data[dataPos : dataPos + dataSize]
result.append(decompileTupleVariation_(
pointCount, sharedTuples, sharedPoints,
tableTag, axisTags, tupleData, pointDeltaData))
pos += tupleSize
dataPos += dataSize
return result
def decompileTupleVariation_(pointCount, sharedTuples, sharedPoints,
tableTag, axisTags, data, tupleData):
assert tableTag in ("cvar", "gvar"), tableTag
flags = struct.unpack(">H", data[2:4])[0]
pos = 4
if (flags & EMBEDDED_PEAK_TUPLE) == 0:
peak = sharedTuples[flags & TUPLE_INDEX_MASK]
else:
peak, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
if (flags & INTERMEDIATE_REGION) != 0:
start, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
end, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
else:
start, end = inferRegion_(peak)
axes = {}
for axis in axisTags:
region = start[axis], peak[axis], end[axis]
if region != (0.0, 0.0, 0.0):
axes[axis] = region
pos = 0
if (flags & PRIVATE_POINT_NUMBERS) != 0:
points, pos = TupleVariation.decompilePoints_(
pointCount, tupleData, pos, tableTag)
else:
points = sharedPoints
deltas = [None] * pointCount
if tableTag == "cvar":
deltas_cvt, pos = TupleVariation.decompileDeltas_(
len(points), tupleData, pos)
for p, delta in zip(points, deltas_cvt):
if 0 <= p < pointCount:
deltas[p] = delta
elif tableTag == "gvar":
deltas_x, pos = TupleVariation.decompileDeltas_(
len(points), tupleData, pos)
deltas_y, pos = TupleVariation.decompileDeltas_(
len(points), tupleData, pos)
for p, x, y in zip(points, deltas_x, deltas_y):
if 0 <= p < pointCount:
deltas[p] = (x, y)
return TupleVariation(axes, deltas)
def inferRegion_(peak):
"""Infer start and end for a (non-intermediate) region
This helper function computes the applicability region for
variation tuples whose INTERMEDIATE_REGION flag is not set in the
TupleVariationHeader structure. Variation tuples apply only to
certain regions of the variation space; outside that region, the
tuple has no effect. To make the binary encoding more compact,
TupleVariationHeaders can omit the intermediateStartTuple and
intermediateEndTuple fields.
"""
start, end = {}, {}
for (axis, value) in peak.items():
start[axis] = min(value, 0.0) # -0.3 --> -0.3; 0.7 --> 0.0
end[axis] = max(value, 0.0) # -0.3 --> 0.0; 0.7 --> 0.7
return (start, end)
| mit | -6,449,184,711,009,154,000 | 33.440514 | 114 | 0.685323 | false |
DrYerzinia/Cat-Finder | src/KittyTracker/kittyTracker.py | 1 | 1581 | from netaddr import *
from datetime import datetime
import blescan
import time
import sys
import bluetooth._bluetooth as bluez
from Kitty import Kitty
from CheckKittys import CheckKittys
from BLESerialScanner import BLESerialScanner
import SendMail
import config
def process(mac, rssi):
found = False
for k in config.kittys:
if mac == k.mac:
k.lastHeard = datetime.now()
print 'Heard ' , k.name , ' at ' + str(rssi) + 'dBm!'
if k.ttw != 180:
SendMail.sendMail(k.name + ' reacquired')
k.ttw = 180
found = True
break
if not found:
		print 'Unknown mac: ' , mac
sys.stdout.flush()
def main():
running = True
kittyChecker = CheckKittys()
scanner = BLESerialScanner(process)
# dev_id = 0
# try:
# sock = bluez.hci_open_dev(dev_id)
# print "ble thread started"
# except:
# print "error accessing bluetooth device..."
# sys.exit(1)
# blescan.hci_le_set_scan_parameters(sock)
# blescan.hci_enable_le_scan(sock)
kittyChecker.daemon = True
kittyChecker.kittys = config.kittys
kittyChecker.running = True
kittyChecker.start()
scanner.start()
message = "Kitty Tracker Active! Now tracking " + ", ".join(str(k.name) for k in config.kittys)
print message
SendMail.sendMail(message)
try:
while running:
time.sleep(1)
except KeyboardInterrupt:
running = False
kittyChecker.running = False
scanner.running = False
print "Terminating..."
# returnedList = blescan.parse_events(sock, 1)
# for beacon in returnedList:
# mac, a, b, c, d, rssi = beacon.split(',')
# mac = EUI(mac)
if __name__ == '__main__':
main()
| unlicense | -1,341,239,824,988,486,700 | 19.532468 | 96 | 0.688805 | false |
pyannote/pyannote-audio | pyannote/audio/tasks/segmentation/overlapped_speech_detection.py | 1 | 5947 | # MIT License
#
# Copyright (c) 2020-2021 CNRS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from typing import Text, Tuple, Union
import numpy as np
from torch_audiomentations.core.transforms_interface import BaseWaveformTransform
from pyannote.audio.core.task import Problem, Resolution, Specifications, Task
from pyannote.audio.tasks.segmentation.mixins import SegmentationTaskMixin
from pyannote.database import Protocol
class OverlappedSpeechDetection(SegmentationTaskMixin, Task):
"""Overlapped speech detection
Overlapped speech detection is the task of detecting regions where at least
two speakers are speaking at the same time.
Here, it is addressed with the same approach as voice activity detection,
except "speech" class is replaced by "overlap", where a frame is marked as
"overlap" if two speakers or more are active.
    Note that data augmentation is used to increase the proportion of "overlap".
This is achieved by generating chunks made out of the (weighted) sum of two
random chunks.
Parameters
----------
protocol : Protocol
pyannote.database protocol
duration : float, optional
Chunks duration. Defaults to 2s.
warm_up : float or (float, float), optional
Use that many seconds on the left- and rightmost parts of each chunk
to warm up the model. While the model does process those left- and right-most
parts, only the remaining central part of each chunk is used for computing the
loss during training, and for aggregating scores during inference.
Defaults to 0. (i.e. no warm-up).
balance: str, optional
When provided, training samples are sampled uniformly with respect to that key.
For instance, setting `balance` to "uri" will make sure that each file will be
equally represented in the training samples.
overlap: dict, optional
Controls how artificial chunks with overlapping speech are generated:
- "probability" key is the probability of artificial overlapping chunks. Setting
"probability" to 0.6 means that, on average, 40% of training chunks are "real"
          chunks, while 60% are artificial chunks made out of the (weighted) sum of two
chunks. Defaults to 0.5.
- "snr_min" and "snr_max" keys control the minimum and maximum signal-to-noise
ratio between summed chunks, in dB. Default to 0.0 and 10.
weight: str, optional
        When provided, use this key as frame-wise weight in the loss function.
batch_size : int, optional
Number of training samples per batch. Defaults to 32.
num_workers : int, optional
Number of workers used for generating training samples.
Defaults to multiprocessing.cpu_count() // 2.
pin_memory : bool, optional
If True, data loaders will copy tensors into CUDA pinned
memory before returning them. See pytorch documentation
for more details. Defaults to False.
augmentation : BaseWaveformTransform, optional
torch_audiomentations waveform transform, used by dataloader
during training.
"""
ACRONYM = "osd"
OVERLAP_DEFAULTS = {"probability": 0.5, "snr_min": 0.0, "snr_max": 10.0}
def __init__(
self,
protocol: Protocol,
duration: float = 2.0,
warm_up: Union[float, Tuple[float, float]] = 0.0,
overlap: dict = OVERLAP_DEFAULTS,
balance: Text = None,
weight: Text = None,
batch_size: int = 32,
num_workers: int = None,
pin_memory: bool = False,
augmentation: BaseWaveformTransform = None,
):
super().__init__(
protocol,
duration=duration,
warm_up=warm_up,
batch_size=batch_size,
num_workers=num_workers,
pin_memory=pin_memory,
augmentation=augmentation,
)
self.specifications = Specifications(
problem=Problem.BINARY_CLASSIFICATION,
resolution=Resolution.FRAME,
duration=self.duration,
warm_up=self.warm_up,
classes=[
"overlap",
],
)
self.overlap = overlap
self.balance = balance
self.weight = weight
def prepare_y(self, one_hot_y: np.ndarray) -> np.ndarray:
"""Get overlapped speech detection targets
Parameters
----------
one_hot_y : (num_frames, num_speakers) np.ndarray
One-hot-encoding of current chunk speaker activity:
* one_hot_y[t, k] = 1 if kth speaker is active at tth frame
* one_hot_y[t, k] = 0 otherwise.
Returns
-------
y : (num_frames, ) np.ndarray
y[t] = 1 if there is two or more active speakers at tth frame, 0 otherwise.
"""
return np.int64(np.sum(one_hot_y, axis=1, keepdims=False) > 1)
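
    # Illustrative expectation for prepare_y (added example, not part of the
    # original source): with one_hot_y = np.array([[1, 0], [1, 1], [0, 0]])
    # the result is array([0, 1, 0]) -- only the frame where two speakers
    # are simultaneously active is marked as overlap.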
| mit | -6,314,767,222,491,880,000 | 39.732877 | 88 | 0.670254 | false |
sserrot/champion_relationships | venv/Lib/site-packages/jupyter_core/application.py | 1 | 8439 | # encoding: utf-8
"""
A base Application class for Jupyter applications.
All Jupyter applications should inherit from this.
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
from copy import deepcopy
import logging
import os
import sys
try:
# py3
from shutil import which
except ImportError:
from .utils.shutil_which import which
try:
raw_input
except NameError:
# py3
raw_input = input
from traitlets.config.application import Application, catch_config_error
from traitlets.config.loader import ConfigFileNotFound
from traitlets import Unicode, Bool, List, observe
from .utils import ensure_dir_exists
from ipython_genutils import py3compat
from .paths import (
jupyter_config_dir, jupyter_data_dir, jupyter_runtime_dir,
jupyter_path, jupyter_config_path, allow_insecure_writes,
issue_insecure_write_warning
)
# aliases and flags
base_aliases = {
'log-level' : 'Application.log_level',
'config' : 'JupyterApp.config_file',
}
base_flags = {
'debug': ({'Application' : {'log_level' : logging.DEBUG}},
"set log level to logging.DEBUG (maximize logging output)"),
'generate-config': ({'JupyterApp': {'generate_config': True}},
"generate default config file"),
'y': ({'JupyterApp': {'answer_yes': True}},
"Answer yes to any questions instead of prompting."),
}
class NoStart(Exception):
"""Exception to raise when an application shouldn't start"""
class JupyterApp(Application):
"""Base class for Jupyter applications"""
name = 'jupyter' # override in subclasses
description = "A Jupyter Application"
aliases = base_aliases
flags = base_flags
def _log_level_default(self):
return logging.INFO
jupyter_path = List(Unicode())
def _jupyter_path_default(self):
return jupyter_path()
config_dir = Unicode()
def _config_dir_default(self):
return jupyter_config_dir()
@property
def config_file_paths(self):
path = jupyter_config_path()
if self.config_dir not in path:
path.insert(0, self.config_dir)
path.insert(0, py3compat.getcwd())
return path
data_dir = Unicode()
def _data_dir_default(self):
d = jupyter_data_dir()
ensure_dir_exists(d, mode=0o700)
return d
runtime_dir = Unicode()
def _runtime_dir_default(self):
rd = jupyter_runtime_dir()
ensure_dir_exists(rd, mode=0o700)
return rd
@observe('runtime_dir')
def _runtime_dir_changed(self, change):
ensure_dir_exists(change['new'], mode=0o700)
generate_config = Bool(False, config=True,
help="""Generate default config file."""
)
config_file_name = Unicode(config=True,
help="Specify a config file to load."
)
def _config_file_name_default(self):
if not self.name:
return ''
return self.name.replace('-','_') + u'_config'
config_file = Unicode(config=True,
help="""Full path of a config file.""",
)
answer_yes = Bool(False, config=True,
help="""Answer yes to any prompts."""
)
def write_default_config(self):
"""Write our default config to a .py config file"""
if self.config_file:
config_file = self.config_file
else:
config_file = os.path.join(self.config_dir, self.config_file_name + '.py')
if os.path.exists(config_file) and not self.answer_yes:
answer = ''
def ask():
prompt = "Overwrite %s with default config? [y/N]" % config_file
try:
return raw_input(prompt).lower() or 'n'
except KeyboardInterrupt:
print('') # empty line
return 'n'
answer = ask()
while not answer.startswith(('y', 'n')):
print("Please answer 'yes' or 'no'")
answer = ask()
if answer.startswith('n'):
return
config_text = self.generate_config_file()
if isinstance(config_text, bytes):
config_text = config_text.decode('utf8')
print("Writing default config to: %s" % config_file)
ensure_dir_exists(os.path.abspath(os.path.dirname(config_file)), 0o700)
with open(config_file, mode='w') as f:
f.write(config_text)
def migrate_config(self):
"""Migrate config/data from IPython 3"""
if os.path.exists(os.path.join(self.config_dir, 'migrated')):
# already migrated
return
from .migrate import get_ipython_dir, migrate
# No IPython dir, nothing to migrate
if not os.path.exists(get_ipython_dir()):
return
migrate()
def load_config_file(self, suppress_errors=True):
"""Load the config file.
By default, errors in loading config are handled, and a warning
printed on screen. For testing, the suppress_errors option is set
to False, so errors will make tests fail.
"""
self.log.debug("Searching %s for config files", self.config_file_paths)
base_config = 'jupyter_config'
try:
super(JupyterApp, self).load_config_file(
base_config,
path=self.config_file_paths,
)
except ConfigFileNotFound:
# ignore errors loading parent
self.log.debug("Config file %s not found", base_config)
pass
if self.config_file:
path, config_file_name = os.path.split(self.config_file)
else:
path = self.config_file_paths
config_file_name = self.config_file_name
if not config_file_name or (config_file_name == base_config):
return
try:
super(JupyterApp, self).load_config_file(
config_file_name,
path=path
)
except ConfigFileNotFound:
self.log.debug("Config file not found, skipping: %s", config_file_name)
except Exception:
# Reraise errors for testing purposes, or if set in
# self.raise_config_file_errors
if (not suppress_errors) or self.raise_config_file_errors:
raise
self.log.warning("Error loading config file: %s" %
config_file_name, exc_info=True)
# subcommand-related
def _find_subcommand(self, name):
name = '{}-{}'.format(self.name, name)
return which(name)
@property
def _dispatching(self):
"""Return whether we are dispatching to another command
or running ourselves.
"""
return bool(self.generate_config or self.subapp or self.subcommand)
subcommand = Unicode()
@catch_config_error
def initialize(self, argv=None):
# don't hook up crash handler before parsing command-line
if argv is None:
argv = sys.argv[1:]
if argv:
subc = self._find_subcommand(argv[0])
if subc:
self.argv = argv
self.subcommand = subc
return
self.parse_command_line(argv)
cl_config = deepcopy(self.config)
if self._dispatching:
return
self.migrate_config()
self.load_config_file()
# enforce cl-opts override configfile opts:
self.update_config(cl_config)
if allow_insecure_writes:
issue_insecure_write_warning()
def start(self):
"""Start the whole thing"""
if self.subcommand:
os.execv(self.subcommand, [self.subcommand] + self.argv[1:])
raise NoStart()
if self.subapp:
self.subapp.start()
raise NoStart()
if self.generate_config:
self.write_default_config()
raise NoStart()
@classmethod
def launch_instance(cls, argv=None, **kwargs):
"""Launch an instance of a Jupyter Application"""
try:
return super(JupyterApp, cls).launch_instance(argv=argv, **kwargs)
except NoStart:
return
if __name__ == '__main__':
JupyterApp.launch_instance()
| mit | 4,790,358,782,169,673,000 | 29.687273 | 86 | 0.584192 | false |
vgrem/Office365-REST-Python-Client | tests/sharepoint/test_publishing.py | 1 | 1894 | from office365.sharepoint.publishing.primary_city_time import PrimaryCityTime
from office365.sharepoint.publishing.site_page_metadata_collection import SitePageMetadataCollection
from office365.sharepoint.publishing.site_page_service import SitePageService
from office365.sharepoint.publishing.video_service_discoverer import VideoServiceDiscoverer
from tests.sharepoint.sharepoint_case import SPTestCase
class TestSPPublishing(SPTestCase):
@classmethod
def setUpClass(cls):
super(TestSPPublishing, cls).setUpClass()
@classmethod
def tearDownClass(cls):
pass
def test1_init_site_page_service(self):
svc = SitePageService(self.client).get().execute_query()
self.assertIsNotNone(svc.resource_path)
def test2_get_site_pages(self):
svc = SitePageService(self.client)
pages = svc.pages().get().execute_query()
self.assertIsInstance(pages, SitePageMetadataCollection)
def test3_get_time_zone(self):
time_zone = SitePageService.get_time_zone(self.client, "Moscow").execute_query()
self.assertIsInstance(time_zone, PrimaryCityTime)
self.assertEqual(time_zone.properties.get("Location"), "Moscow, Russia")
def test4_compute_file_name(self):
result = SitePageService.compute_file_name(self.client, "Test page").execute_query()
self.assertIsNotNone(result.value)
def test5_file_picker_tab_options(self):
result = SitePageService.file_picker_tab_options(self.client).execute_query()
self.assertIsNotNone(result.value)
def test6_org_assets(self):
result = SitePageService.org_assets(self.client).execute_query()
self.assertIsNotNone(result.value)
def test7_get_video_service_manager(self):
discoverer = VideoServiceDiscoverer(self.client).get().execute_query()
self.assertIsNotNone(discoverer.resource_path)
| mit | -887,073,155,801,701,000 | 40.173913 | 100 | 0.73548 | false |
ales-erjavec/orange-canvas | orangecanvas/scheme/tests/__init__.py | 1 | 2700 | """
Scheme tests
"""
from AnyQt.QtCore import QObject, QEventLoop, QTimer, QCoreApplication, QEvent
from typing import List
class EventSpy(QObject):
"""
A testing utility class (similar to QSignalSpy) to record events
delivered to a QObject instance.
Note
----
Only event types can be recorded (as QEvent instances are deleted
on delivery).
Note
----
Can only be used with a QCoreApplication running.
Parameters
----------
object : QObject
An object whose events need to be recorded.
etype : Union[QEvent.Type, Sequence[QEvent.Type]
A event type (or types) that should be recorded
"""
def __init__(self, object: QObject, etype, **kwargs):
super().__init__(**kwargs)
if not isinstance(object, QObject):
raise TypeError
self.__object = object
try:
len(etype)
except TypeError:
etypes = {etype}
else:
etypes = set(etype)
self.__etypes = etypes
self.__record = []
self.__loop = QEventLoop()
self.__timer = QTimer(self, singleShot=True)
self.__timer.timeout.connect(self.__loop.quit)
self.__object.installEventFilter(self)
def wait(self, timeout=5000):
"""
Start an event loop that runs until a spied event or a timeout occurred.
Parameters
----------
timeout : int
Timeout in milliseconds.
Returns
-------
res : bool
True if the event occurred and False otherwise.
Example
-------
>>> app = QCoreApplication.instance() or QCoreApplication([])
>>> obj = QObject()
>>> spy = EventSpy(obj, QEvent.User)
>>> app.postEvent(obj, QEvent(QEvent.User))
>>> spy.wait()
True
>>> print(spy.events())
[1000]
"""
count = len(self.__record)
self.__timer.stop()
self.__timer.setInterval(timeout)
self.__timer.start()
self.__loop.exec_()
self.__timer.stop()
return len(self.__record) != count
    def eventFilter(self, receiver: QObject, event: QEvent) -> bool:
        if receiver is self.__object and event.type() in self.__etypes:
self.__record.append(event.type())
if self.__loop.isRunning():
self.__loop.quit()
        return super().eventFilter(receiver, event)
def events(self) -> List[QEvent.Type]:
"""
Return a list of all (listened to) event types that occurred.
Returns
-------
events : List[QEvent.Type]
"""
return list(self.__record)
| gpl-3.0 | 5,709,660,334,887,075,000 | 26.835052 | 80 | 0.554815 | false |
rajul/tvb-framework | tvb/tests/framework/adapters/visualizers/ica_test.py | 1 | 3631 | # -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need do download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Bogdan Neacsa <bogdan.neacsa@codemart.ro>
"""
import unittest
from tvb.core.entities.file.files_helper import FilesHelper
from tvb.adapters.visualizers.ica import ICA
from tvb.datatypes.connectivity import Connectivity
from tvb.tests.framework.core.test_factory import TestFactory
from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory
from tvb.tests.framework.core.base_testcase import TransactionalTestCase
class ICATest(TransactionalTestCase):
"""
Unit-tests for ICA Viewer.
"""
def setUp(self):
"""
Sets up the environment for running the tests;
creates a test user, a test project, a connectivity and a surface;
imports a CFF data-set
"""
self.datatypeFactory = DatatypesFactory()
self.test_project = self.datatypeFactory.get_project()
self.test_user = self.datatypeFactory.get_user()
TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
self.assertTrue(self.connectivity is not None)
def tearDown(self):
"""
        Clean up test data
"""
FilesHelper().remove_project_structure(self.test_project.name)
def test_launch(self):
"""
Check that all required keys are present in output from BrainViewer launch.
"""
time_series = self.datatypeFactory.create_timeseries(self.connectivity)
conn_measure = self.datatypeFactory.create_ICA(time_series)
viewer = ICA()
result = viewer.launch(conn_measure)
expected_keys = ['matrix_strides', 'matrix_shape', 'matrix_data', 'mainContent', 'isAdapter']
for key in expected_keys:
self.assertTrue(key in result)
def suite():
"""
Gather all the tests in a test suite.
"""
test_suite = unittest.TestSuite()
test_suite.addTest(unittest.makeSuite(ICATest))
return test_suite
if __name__ == "__main__":
#So you can run tests from this package individually.
TEST_RUNNER = unittest.TextTestRunner()
TEST_SUITE = suite()
TEST_RUNNER.run(TEST_SUITE) | gpl-2.0 | -877,347,918,199,350,000 | 36.833333 | 102 | 0.705866 | false |
SensibilityTestbed/clearinghouse | node_state_transitions/tests/test_onepercentmanyevents_to_onepercentmanyevents.py | 1 | 5325 | """
<Program>
test_onepercentmanyevents_to_onepercentmanyevents.py
<Purpose>
Test out the onepercentmanyevents_to_onepercentmanyevents transition state.
Test to see if the database is updated properly
<Author>
Monzur Muhammad
monzum@cs.washington.edu
<Started>
Aug 21, 2009
"""
# The clearinghouse testlib must be imported first.
from clearinghouse.tests import testlib
from clearinghouse.node_state_transitions import node_transition_lib
from clearinghouse.node_state_transitions import transition_onepercentmanyevents_to_onepercentmanyevents
from clearinghouse.common.api import maindb
from clearinghouse.node_state_transitions.tests import mockutil
#vessel dictionary for this test
vessels_dict = {}
vessels_dict[mockutil.extra_vessel_name] = {"userkeys" : [node_transition_lib.onepercentmanyeventspublickey],
"ownerkey" : "SeattleGENI",
"ownerinfo" : "",
"status" : "",
"advertise" : True}
vessels_dict["vessel_non_clearinghouse"] = {"userkeys" : ["some random key"],
"ownerkey" : mockutil.donor_key,
"ownerinfo" : "",
"status" : "",
"advertise" : True}
vessels_dict["random_vessel"] = {"userkeys" : ["some random key"],
"ownerkey" : "random key",
"ownerinfo" : "",
"status" : "",
"advertise" : True}
def setup_test():
"""
<Purpose>
Prepare everything in order to run the tests.
<Arguments>
None
<Exceptions>
None
  <Side Effects>
None
<Return>
None
"""
testlib.setup_test_db()
# Create a user who has the donation key.
user_object = maindb.create_user(mockutil.testusername, "password", "example@example.com", "affiliation",
"10 11", "2 2 2", mockutil.donor_key_str)
# Create a database entry for the node
node_object = maindb.create_node(mockutil.nodeid_key_str, mockutil.node_ip, mockutil.node_port, "10.0test",
True, mockutil.per_node_key_str, mockutil.extra_vessel_name)
# Create a donation for user
maindb.create_donation(node_object, user_object, "Making a donation")
# Setup all the mock functions
mockutil.mock_nodemanager_get_node_info(mockutil.nodeid_key, "10.0test", vessels_dict)
mockutil.mock_lockserver_calls()
mockutil.mock_backend_generate_key([mockutil.per_node_key_str])
mockutil.mock_nodemanager_get_vesselresources()
mockutil.mock_transitionlib_do_advertise_lookup([mockutil.node_address])
mockutil.mock_backend_set_vessel_owner_key()
mockutil.mock_backend_split_vessel()
# set_vessel_user_keylist_call_count won't be called because the
# node_transition_lib will see that it's a state change to the same state.
mockutil.mock_backend_set_vessel_user_keylist(None)
def run_database_update_test():
"""
<Purpose>
Run the test and make sure that the database was modified
properly, the right keys were set, and generally all information
is what was expected.
<Arguments>
None
<Exceptions>
None
<Side Effects>
None
<Return>
None
"""
transitionlist = []
active_nodes_list = maindb.get_active_nodes()
active_nodes_list[0].is_active = False
active_nodes_list[0].save()
transitionlist.append((("startstatename", node_transition_lib.onepercentmanyeventspublickey),
("endstatename", node_transition_lib.onepercentmanyeventspublickey),
transition_onepercentmanyevents_to_onepercentmanyevents.update_database,
node_transition_lib.noop,
transition_onepercentmanyevents_to_onepercentmanyevents.update_database_node))
(success_count, failure_count) = node_transition_lib.do_one_processnode_run(transitionlist, "startstatename", 1)[0]
assert(success_count == 1)
assert(failure_count == 0)
assert_database_info()
assert(mockutil.set_vessel_owner_key_call_count == 0)
# set_vessel_user_keylist_call_count won't be called because the
# node_transition_lib will see that it's a state change to the same state.
assert(mockutil.set_vessel_user_keylist_call_count == 0)
def assert_database_info():
active_nodes_list = maindb.get_active_nodes()
assert(len(active_nodes_list) == 1)
assert(active_nodes_list[0].node_identifier == mockutil.nodeid_key_str)
assert(active_nodes_list[0].last_known_ip == mockutil.node_ip)
assert(active_nodes_list[0].last_known_port == mockutil.node_port)
assert(active_nodes_list[0].extra_vessel_name == mockutil.extra_vessel_name)
assert(active_nodes_list[0].owner_pubkey == mockutil.per_node_key_str)
testuser = maindb.get_user(mockutil.testusername)
donations_list = maindb.get_donations_by_user(testuser)
assert(len(donations_list) == 1)
assert(donations_list[0].node == active_nodes_list[0])
def teardown_test():
# Cleanup the test database.
testlib.teardown_test_db()
if __name__ == "__main__":
setup_test()
try:
run_database_update_test()
finally:
teardown_test()
| mit | 6,969,548,475,115,442,000 | 28.41989 | 117 | 0.650141 | false |
vegeclic/django-regularcom | carts/migrations/0043_auto__del_field_carriertranslation_name__del_field_carriertranslation_.py | 1 | 27698 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
db.rename_column('carts_carrier_translation', 'name', 'old_name')
db.rename_column('carts_carrier_translation', 'body', 'old_body')
def backwards(self, orm):
db.rename_column('carts_carrier_translation', 'old_name', 'name')
db.rename_column('carts_carrier_translation', 'old_body', 'body')
models = {
'accounts.account': {
'Meta': {'object_name': 'Account'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'date_last_modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'accounts.author': {
'Meta': {'object_name': 'Author'},
'account': ('django.db.models.fields.related.OneToOneField', [], {'unique': 'True', 'to': "orm['accounts.Account']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_image': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'null': 'True', 'related_name': "'+'", 'unique': 'True', 'to': "orm['common.Image']"}),
'name': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '30'})
},
'carts.carrier': {
'Meta': {'object_name': 'Carrier'},
'apply_suppliers_fee': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'weight_min': ('django.db.models.fields.FloatField', [], {'default': '0'})
},
'carts.carrierlevel': {
'Meta': {'object_name': 'CarrierLevel', 'unique_together': "(('carrier', 'weight', 'currency'),)"},
'carrier': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['carts.Carrier']"}),
'currency': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'cart_carrier_level_price_currency'", 'to': "orm['common.Currency']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'price': ('django.db.models.fields.FloatField', [], {}),
'weight': ('django.db.models.fields.FloatField', [], {})
},
'carts.carriertranslation': {
'Meta': {'object_name': 'CarrierTranslation', 'db_table': "'carts_carrier_translation'", 'unique_together': "[('language_code', 'master')]"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '15'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': "orm['carts.Carrier']"}),
'old_body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'old_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'carts.content': {
'Meta': {'object_name': 'Content', 'unique_together': "(('delivery', 'product'),)"},
'customized': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'delivery': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['carts.Delivery']"}),
'extent': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['products.Product']"})
},
'carts.contentproduct': {
'Meta': {'object_name': 'ContentProduct', 'unique_together': "(('content', 'product'),)"},
'content': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['carts.Content']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['suppliers.Product']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'carts.delivery': {
'Meta': {'object_name': 'Delivery'},
'date': ('django.db.models.fields.CharField', [], {'max_length': '7'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'date_last_modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'payed_price': ('django.db.models.fields.FloatField', [], {'blank': 'True', 'null': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '1', 'default': "'w'"}),
'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['carts.Subscription']"})
},
'carts.extent': {
'Meta': {'object_name': 'Extent', 'unique_together': "(('subscription', 'product'),)"},
'customized': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'extent': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['products.Product']"}),
'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['carts.Subscription']"})
},
'carts.extentcontent': {
'Meta': {'object_name': 'ExtentContent'},
'extent': ('django.db.models.fields.related.ForeignKey', [], {'unique': 'True', 'to': "orm['carts.Extent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'carts.extentcontentproduct': {
'Meta': {'object_name': 'ExtentContentProduct', 'unique_together': "(('content', 'product'),)"},
'content': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['carts.ExtentContent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['suppliers.Product']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'carts.price': {
'Meta': {'object_name': 'Price', 'unique_together': "(('size', 'currency'),)"},
'currency': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'cart_size_price_currency'", 'to': "orm['common.Currency']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'price': ('django.db.models.fields.FloatField', [], {}),
'size': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['carts.Size']"})
},
'carts.size': {
'Meta': {'object_name': 'Size'},
'body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'body_de': ('django.db.models.fields.TextField', [], {'blank': 'True', 'null': 'True'}),
'body_fr': ('django.db.models.fields.TextField', [], {'blank': 'True', 'null': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_image': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'null': 'True', 'related_name': "'+'", 'unique': 'True', 'to': "orm['common.Image']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_de': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'weight': ('django.db.models.fields.FloatField', [], {})
},
'carts.subscription': {
'Meta': {'object_name': 'Subscription'},
'carrier': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['carts.Carrier']"}),
'comment': ('django.db.models.fields.TextField', [], {'blank': 'True', 'null': 'True'}),
'criterias': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'cart_subscription_criterias'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['common.Criteria']"}),
'customer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['customers.Customer']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'date_last_modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now': 'True'}),
'direct_debit': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'end': ('django.db.models.fields.CharField', [], {'max_length': '7'}),
'frequency': ('django.db.models.fields.PositiveIntegerField', [], {'max_length': '2', 'default': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'receive_only_once': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'size': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['carts.Size']"}),
'start': ('django.db.models.fields.CharField', [], {'max_length': '7'})
},
'carts.thematic': {
'Meta': {'object_name': 'Thematic'},
'body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'body_de': ('django.db.models.fields.TextField', [], {'blank': 'True', 'null': 'True'}),
'body_fr': ('django.db.models.fields.TextField', [], {'blank': 'True', 'null': 'True'}),
'carrier': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'null': 'True', 'to': "orm['carts.Carrier']"}),
'criterias': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'thematic_criterias'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['common.Criteria']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'date_last_modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'end_duration': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '7', 'null': 'True'}),
'end_period': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '7', 'null': 'True'}),
'frequency': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True', 'max_length': '2', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'locked_carrier': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_criterias': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_duration': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_frequency': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_products': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_quantity': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_receive_only_once': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_size': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_start': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'main_image': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'null': 'True', 'related_name': "'+'", 'unique': 'True', 'to': "orm['common.Image']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_de': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'receive_only_once': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'size': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'null': 'True', 'to': "orm['carts.Size']"}),
'start_duration': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '7', 'null': 'True'}),
'start_period': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '10', 'null': 'True'})
},
'carts.thematicextent': {
'Meta': {'object_name': 'ThematicExtent', 'unique_together': "(('thematic', 'product'),)"},
'extent': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['products.Product']"}),
'thematic': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['carts.Thematic']"})
},
'common.address': {
'Meta': {'object_name': 'Address'},
'city': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['contenttypes.ContentType']"}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'null': 'True', 'to': "orm['common.Country']"}),
'email': ('django.db.models.fields.EmailField', [], {'blank': 'True', 'max_length': '75'}),
'first_name': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '30'}),
'gender': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '1'}),
'home_phone': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '30'}),
'mobile_phone': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'postal_code': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100'}),
'street': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100'})
},
'common.country': {
'Meta': {'object_name': 'Country'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'common.criteria': {
'Meta': {'object_name': 'Criteria'},
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'common.currency': {
'Meta': {'object_name': 'Currency'},
'exchange_rate': ('django.db.models.fields.FloatField', [], {'default': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'unique': 'True', 'max_length': '30'}),
'symbol': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'common.image': {
'Meta': {'object_name': 'Image'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '200'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'contenttypes.contenttype': {
'Meta': {'object_name': 'ContentType', 'db_table': "'django_content_type'", 'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'customers.customer': {
'Meta': {'object_name': 'Customer'},
'account': ('django.db.models.fields.related.OneToOneField', [], {'unique': 'True', 'to': "orm['accounts.Account']"}),
'billing_address': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'null': 'True', 'related_name': "'+'", 'unique': 'True', 'to': "orm['common.Address']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'date_last_modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now': 'True'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'blank': 'True', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_address': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'null': 'True', 'related_name': "'+'", 'unique': 'True', 'to': "orm['common.Address']"}),
'main_image': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'null': 'True', 'related_name': "'+'", 'unique': 'True', 'to': "orm['common.Image']"}),
'pro': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'relay_address': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'null': 'True', 'related_name': "'+'", 'unique': 'True', 'to': "orm['common.Address']"}),
'shipping_address': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'null': 'True', 'related_name': "'+'", 'unique': 'True', 'to': "orm['common.Address']"})
},
'products.category': {
'Meta': {'object_name': 'Category'},
'authors': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'symmetrical': 'False', 'null': 'True', 'to': "orm['accounts.Author']"}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'categories_rel_+'", 'null': 'True', 'to': "orm['products.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_image': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'null': 'True', 'related_name': "'+'", 'unique': 'True', 'to': "orm['common.Image']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_de': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'blank': 'True', 'max_length': '50', 'null': 'True'})
},
'products.product': {
'Meta': {'object_name': 'Product'},
'body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'body_de': ('django.db.models.fields.TextField', [], {'blank': 'True', 'null': 'True'}),
'body_fr': ('django.db.models.fields.TextField', [], {'blank': 'True', 'null': 'True'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['products.Category']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'date_last_modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_image': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'null': 'True', 'related_name': "'+'", 'unique': 'True', 'to': "orm['common.Image']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_de': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'products_parent': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'products_children'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['products.Product']"}),
'slug': ('django.db.models.fields.SlugField', [], {'blank': 'True', 'max_length': '50', 'null': 'True'}),
'slug_de': ('django.db.models.fields.SlugField', [], {'blank': 'True', 'max_length': '50', 'null': 'True'}),
'slug_fr': ('django.db.models.fields.SlugField', [], {'blank': 'True', 'max_length': '50', 'null': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '1', 'default': "'d'"})
},
'products.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'tag': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
'suppliers.product': {
'Meta': {'object_name': 'Product'},
'body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'body_de': ('django.db.models.fields.TextField', [], {'blank': 'True', 'null': 'True'}),
'body_fr': ('django.db.models.fields.TextField', [], {'blank': 'True', 'null': 'True'}),
'criterias': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'product_criterias'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['common.Criteria']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'date_last_modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ingredients': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'ingredients_de': ('django.db.models.fields.TextField', [], {'blank': 'True', 'null': 'True'}),
'ingredients_fr': ('django.db.models.fields.TextField', [], {'blank': 'True', 'null': 'True'}),
'main_image': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'null': 'True', 'related_name': "'+'", 'unique': 'True', 'to': "orm['common.Image']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_de': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'name_fr': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product_product'", 'to': "orm['products.Product']"}),
'sku': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'slug_de': ('django.db.models.fields.SlugField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'slug_fr': ('django.db.models.fields.SlugField', [], {'blank': 'True', 'max_length': '100', 'null': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '1', 'default': "'d'"}),
'suppliers': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'product_suppliers'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['suppliers.Supplier']"}),
'weight': ('django.db.models.fields.FloatField', [], {'blank': 'True', 'null': 'True'})
},
'suppliers.supplier': {
'Meta': {'object_name': 'Supplier'},
'delivery_delay': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_image': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'null': 'True', 'related_name': "'+'", 'unique': 'True', 'to': "orm['common.Image']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'suppliers': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'suppliers_rel_+'", 'null': 'True', 'to': "orm['suppliers.Supplier']"}),
'threshold_order': ('django.db.models.fields.PositiveIntegerField', [], {'blank': 'True', 'null': 'True'})
}
}
complete_apps = ['carts']
| agpl-3.0 | 7,685,791,696,169,106,000 | 86.100629 | 221 | 0.54679 | false |
fedora-infra/shumgrepper | shumgrepper/doc_utils.py | 1 | 1983 | import os
import codecs
import jinja2
import docutils
import docutils.examples
import markupsafe
import shumgrepper
def modify_rst(rst):
""" Downgrade some of our rst directives if docutils is too old. """
try:
# The rst features we need were introduced in this version
minimum = [0, 9]
        # use a list so the comparison below also works where map() is lazy
        version = [int(x) for x in docutils.__version__.split('.')]
# If we're at or later than that version, no need to downgrade
if version >= minimum:
return rst
except Exception:
# If there was some error parsing or comparing versions, run the
# substitutions just to be safe.
pass
# Otherwise, make code-blocks into just literal blocks.
substitutions = {
'.. code-block:: javascript': '::',
}
for old, new in substitutions.items():
rst = rst.replace(old, new)
return rst
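# Example (with docutils older than 0.9, derived from the table above):
#     modify_rst('.. code-block:: javascript') -> '::'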
def modify_html(html):
""" Perform style substitutions where docutils doesn't do what we want.
"""
substitutions = {
'<tt class="docutils literal">': '<code>',
'</tt>': '</code>',
}
for old, new in substitutions.items():
html = html.replace(old, new)
return html
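# Example (derived from the substitution table above):
#     modify_html('<tt class="docutils literal">x</tt>') -> '<code>x</code>'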
def preload_docs(endpoint):
""" Utility to load an RST file and turn it into fancy HTML. """
here = os.path.dirname(os.path.abspath(__file__))
fname = os.path.join(here, 'docs', endpoint + '.rst')
with codecs.open(fname, 'r', 'utf-8') as f:
rst = f.read()
rst = modify_rst(rst)
api_docs = docutils.examples.html_body(rst)
api_docs = modify_html(api_docs)
api_docs = markupsafe.Markup(api_docs)
return api_docs
htmldocs = dict.fromkeys(['api', 'home'])
for key in htmldocs:
htmldocs[key] = preload_docs(key)
def load_docs(request):
URL = shumgrepper.app.config.get('SHUMGREPPER_BASE_URL', request.url_root)
docs = htmldocs[request.endpoint]
docs = jinja2.Template(docs).render(URL=URL)
return markupsafe.Markup(docs)
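# Minimal usage sketch (assumes a Flask request context; the route and
# template below are illustrative, not part of this module):
#
#     @shumgrepper.app.route('/api')
#     def api():
#         return flask.render_template('api.html',
#                                      docs=load_docs(flask.request))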
| gpl-2.0 | 1,478,673,242,582,092,000 | 24.753247 | 78 | 0.630862 | false |
robwarm/gpaw-symm | gpaw/cluster.py | 1 | 6122 | """Extensions to the ase Atoms class
"""
import numpy as np
from ase import Atoms
from ase.io import read, write
from ase.data import covalent_radii
from ase.calculators.neighborlist import NeighborList
class Cluster(Atoms):
"""A class for cluster structures
to enable simplified manipulation"""
def __init__(self, *args, **kwargs):
self.data = {}
if len(args) > 0:
filename = args[0]
if isinstance(filename, str):
self.read(filename, kwargs.get('filetype'))
return
else:
Atoms.__init__(self, [])
if kwargs.get('filename') is not None:
filename = kwargs.pop('filename')
Atoms.__init__(self, *args, **kwargs)
self.read(filename, kwargs.get('filetype'))
else:
Atoms.__init__(self, *args, **kwargs)
def extreme_positions(self):
"""get the extreme positions of the structure"""
pos = self.get_positions()
return np.array([np.minimum.reduce(pos), np.maximum.reduce(pos)])
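    # e.g. for positions [(0, 0, 0), (1, 2, 3)] this returns
    # np.array([[0, 0, 0], [1, 2, 3]]), i.e. [minima, maxima] per axis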
def find_connected(self, index, dmax=None, scale=1.5):
"""Find the atoms connected to self[index] and return them.
If dmax is not None:
Atoms are defined to be connected if they are nearer than dmax
to each other.
If dmax is None:
Atoms are defined to be connected if they are nearer than the
sum of their covalent radii * scale to each other.
"""
# set neighbor lists
neighborlist = []
if dmax is None:
# define neighbors according to covalent radii
radii = scale * covalent_radii[self.get_atomic_numbers()]
for atom in self:
positions = self.positions - atom.position
distances = np.sqrt(np.sum(positions**2, axis=1))
radius = scale * covalent_radii[atom.number]
neighborlist.append(np.where(distances < radii + radius)[0])
else:
# define neighbors according to distance
nl = NeighborList([0.5 * dmax] * len(self), skin=0)
nl.update(self)
for i, atom in enumerate(self):
neighborlist.append(list(nl.get_neighbors(i)[0]))
connected = list(neighborlist[index])
isolated = False
while not isolated:
isolated = True
for i in connected:
for j in neighborlist[i]:
if j in connected:
pass
else:
connected.append(j)
isolated = False
atoms = Cluster()
for i in connected:
atoms.append(self[i])
return atoms
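    # Usage sketch ('mol.xyz' is an assumed input file):
    #
    #     mol = Cluster(filename='mol.xyz')
    #     piece = mol.find_connected(0)             # covalent-radii criterion
    #     piece = mol.find_connected(0, dmax=1.6)   # fixed-distance criterion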
def minimal_box(self, border=0, h=None, multiple=4):
"""The box needed to fit the structure in.
The structure is moved to fit into the box [(0,x),(0,y),(0,z)]
with x,y,z > 0 (fitting the ASE constriction).
The border argument can be used to add a border of empty space
around the structure.
If h is set, the box is extended to ensure that box/h is
a multiple of 'multiple'.
This ensures that GPAW uses the desired h.
The shift applied to the structure is returned.
"""
if len(self) == 0:
return None
extr = self.extreme_positions()
# add borders
        if isinstance(border, list):
            b = border
        else:
            b = [border, border, border]
for c in range(3):
extr[0][c] -= b[c]
extr[1][c] += b[c] - extr[0][c] # shifted already
        # ensure each box length is a multiple of 'multiple' grid spacings
if h is not None:
if not hasattr(h, '__len__'):
h = np.array([h, h, h])
for c in range(3):
# apply the same as in paw.py
L = extr[1][c] # shifted already
N = np.ceil(L / h[c] / multiple) * multiple
# correct L
dL = N * h[c] - L
# move accordingly
extr[1][c] += dL # shifted already
extr[0][c] -= dL / 2.
# move lower corner to (0, 0, 0)
shift = tuple(-1. * np.array(extr[0]))
self.translate(shift)
self.set_cell(tuple(extr[1]))
return shift
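    # Usage sketch (illustrative numbers, Angstrom units as in ASE):
    #
    #     shift = cluster.minimal_box(border=4.0, h=0.2)
    #     # the cell now encloses the structure with a 4.0 border and each
    #     # cell length is a multiple of 4 * h, so GPAW keeps the desired h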
    def get(self, name):
        """General get"""
        attr = 'get_' + name
        if hasattr(self, attr):
            # call the corresponding getter and return its result
            return getattr(self, attr)()
        elif name in self.data:
            return self.data[name]
        else:
            return None
def set(self, name, data):
"""General set"""
attr = 'set_' + name
if hasattr(self, attr):
getattr(self, attr)(data)
else:
self.data[name] = data
def read(self, filename, format=None):
"""Read the structure from some file. The type can be given
or it will be guessed from the filename."""
self.__init__(read(filename, format=format))
return len(self)
def write(self, filename=None, format=None, repeat=None):
"""Write the structure to file.
Parameters
----------
format: string
can be given or it will be guessed from the filename
repeat: array, eg.: [1,0,1]
can be used to repeat the structure
"""
if filename is None:
if format is None:
raise RuntimeError('Please specify either filename or format.')
else:
filename = self.get_name() + '.' + format
        if repeat is None:
            out = self
else:
out = Cluster([])
cell = self.get_cell().diagonal()
for i in range(repeat[0] + 1):
for j in range(repeat[1] + 1):
for k in range(repeat[2] + 1):
copy = self.copy()
copy.translate(np.array([i, j, k]) * cell)
out += copy
write(filename, out, format)
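# Usage sketch (file names are illustrative):
#
#     c = Cluster(filename='molecule.xyz')
#     c.minimal_box(border=3.0)
#     c.write('repeated.xyz', repeat=[1, 0, 1])  # doubles cell along x and z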
| gpl-3.0 | -2,736,821,158,500,044,300 | 30.556701 | 79 | 0.516335 | false |
att-comdev/armada | armada/exceptions/tiller_exceptions.py | 1 | 5060 | # Copyright 2017 The Armada Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from armada.exceptions.base_exception import ArmadaBaseException as ex
class TillerException(ex):
'''Base class for Tiller exceptions and error handling.'''
message = 'An unknown Tiller error occurred.'
class TillerServicesUnavailableException(TillerException):
'''
Exception for tiller service being unavailable.
**Troubleshoot:**
*Coming Soon*
'''
message = 'Tiller services unavailable.'
class ChartCleanupException(TillerException):
'''Exception that occurs during chart cleanup.'''
def __init__(self, chart_name):
message = 'An error occurred during cleanup while removing {}'.format(
chart_name)
super(ChartCleanupException, self).__init__(message)
class ListChartsException(TillerException):
'''Exception that occurs when listing charts'''
message = 'There was an error listing the Helm chart releases.'
class PostUpdateJobDeleteException(TillerException):
    '''Exception that occurs when a job deletion fails.'''
def __init__(self, name, namespace):
message = 'Failed to delete k8s job {} in {}'.format(
name, namespace)
super(PostUpdateJobDeleteException, self).__init__(message)
class PostUpdateJobCreateException(TillerException):
'''
Exception that occurs when a job creation fails.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, name, namespace):
message = 'Failed to create k8s job {} in {}'.format(
name, namespace)
super(PostUpdateJobCreateException, self).__init__(message)
class PreUpdateJobDeleteException(TillerException):
'''
    Exception that occurs when a job deletion fails.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, name, namespace):
message = 'Failed to delete k8s job {} in {}'.format(
name, namespace)
super(PreUpdateJobDeleteException, self).__init__(message)
class PreUpdateJobCreateException(TillerException):
'''Exception that occurs when a job creation fails.'''
def __init__(self, name, namespace):
message = 'Failed to create k8s job {} in {}'.format(
name, namespace)
super(PreUpdateJobCreateException, self).__init__(message)
class ReleaseException(TillerException):
'''
Exception that occurs when a release fails to install, upgrade, delete,
or test.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, name, status, action):
til_msg = getattr(status.info, 'Description').encode()
message = 'Failed to {} release: {} - Tiller Message: {}'.format(
action, name, til_msg)
super(ReleaseException, self).__init__(message)
class ChannelException(TillerException):
'''
Exception that occurs during a failed gRPC channel creation
**Troubleshoot:**
*Coming Soon*
'''
message = 'Failed to create gRPC channel.'
class GetReleaseStatusException(TillerException):
'''
    Exception that occurs when a release's status cannot be retrieved.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, release, version):
message = 'Failed to get {} status {} version'.format(
release, version)
super(GetReleaseStatusException, self).__init__(message)
class GetReleaseContentException(TillerException):
    '''Exception that occurs when a release's content cannot be retrieved.'''
def __init__(self, release, version):
        message = 'Failed to get {} content {} version'.format(
            release, version)
super(GetReleaseContentException, self).__init__(message)
class TillerPodNotFoundException(TillerException):
'''
Exception that occurs when a tiller pod cannot be found using the labels
specified in the Armada config.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, labels):
message = 'Could not find Tiller pod with labels "{}"'.format(labels)
super(TillerPodNotFoundException, self).__init__(message)
class TillerPodNotRunningException(TillerException):
'''
Exception that occurs when no tiller pod is found in a running state.
**Troubleshoot:**
*Coming Soon*
'''
message = 'No Tiller pods found in running state'
class TillerVersionException(TillerException):
'''
    Exception that occurs when the Tiller version cannot be retrieved.
**Troubleshoot:**
*Coming Soon*
'''
message = 'Failed to get Tiller Version'
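# Caller-side handling sketch (the tiller handle and LOG are assumed names):
#
#     try:
#         tiller.tiller_version()
#     except TillerVersionException as e:
#         LOG.error(e.message)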
| apache-2.0 | -5,653,126,201,895,981,000 | 25.492147 | 78 | 0.669763 | false |
igor-toga/local-snat | neutron/tests/unit/api/v2/test_base.py | 1 | 67514 | # Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from neutron_lib.api import converters
from neutron_lib import constants
from neutron_lib import exceptions as n_exc
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_policy import policy as oslo_policy
from oslo_utils import uuidutils
import six
import six.moves.urllib.parse as urlparse
import webob
from webob import exc
import webtest
from neutron.api import api_common
from neutron.api import extensions
from neutron.api.v2 import attributes
from neutron.api.v2 import base as v2_base
from neutron.api.v2 import router
from neutron.callbacks import registry
from neutron import context
from neutron import manager
from neutron import policy
from neutron import quota
from neutron.quota import resource_registry
from neutron.tests import base
from neutron.tests import fake_notifier
from neutron.tests import tools
from neutron.tests.unit import testlib_api
EXTDIR = os.path.join(base.ROOTDIR, 'unit/extensions')
_uuid = uuidutils.generate_uuid
def _get_path(resource, id=None, action=None, fmt=None):
path = '/%s' % resource
if id is not None:
path = path + '/%s' % id
if action is not None:
path = path + '/%s' % action
if fmt is not None:
path = path + '.%s' % fmt
return path
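# e.g. _get_path('networks', id='abc', fmt='json') -> '/networks/abc.json'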
class ResourceIndexTestCase(base.BaseTestCase):
def test_index_json(self):
index = webtest.TestApp(router.Index({'foo': 'bar'}))
res = index.get('')
self.assertIn('resources', res.json)
self.assertEqual(1, len(res.json['resources']))
resource = res.json['resources'][0]
self.assertIn('collection', resource)
self.assertEqual('bar', resource['collection'])
self.assertIn('name', resource)
self.assertEqual('foo', resource['name'])
self.assertIn('links', resource)
self.assertEqual(1, len(resource['links']))
link = resource['links'][0]
self.assertIn('href', link)
self.assertEqual(link['href'], 'http://localhost/bar')
self.assertIn('rel', link)
self.assertEqual('self', link['rel'])
class APIv2TestBase(base.BaseTestCase):
def setUp(self):
super(APIv2TestBase, self).setUp()
plugin = 'neutron.neutron_plugin_base_v2.NeutronPluginBaseV2'
# Ensure existing ExtensionManager is not used
extensions.PluginAwareExtensionManager._instance = None
# Create the default configurations
self.config_parse()
# Update the plugin
self.setup_coreplugin(plugin)
cfg.CONF.set_override('allow_pagination', True)
cfg.CONF.set_override('allow_sorting', True)
self._plugin_patcher = mock.patch(plugin, autospec=True)
self.plugin = self._plugin_patcher.start()
instance = self.plugin.return_value
instance._NeutronPluginBaseV2__native_pagination_support = True
instance._NeutronPluginBaseV2__native_sorting_support = True
api = router.APIRouter()
self.api = webtest.TestApp(api)
quota.QUOTAS._driver = None
cfg.CONF.set_override('quota_driver', 'neutron.quota.ConfDriver',
group='QUOTAS')
# APIRouter initialization resets policy module, re-initializing it
policy.init()
class _ArgMatcher(object):
"""An adapter to assist mock assertions, used to custom compare."""
def __init__(self, cmp, obj):
self.cmp = cmp
self.obj = obj
def __eq__(self, other):
return self.cmp(self.obj, other)
def _list_cmp(l1, l2):
return set(l1) == set(l2)
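# Example: assert a call received an expected list regardless of order:
#
#     fields = _ArgMatcher(_list_cmp, ['id', 'name'])
#     instance.get_networks.assert_called_once_with(mock.ANY, fields=fields)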
class APIv2TestCase(APIv2TestBase):
def _do_field_list(self, resource, base_fields):
attr_info = attributes.RESOURCE_ATTRIBUTE_MAP[resource]
policy_attrs = [name for (name, info) in attr_info.items()
if info.get('required_by_policy')]
for name, info in attr_info.items():
if info.get('primary_key'):
policy_attrs.append(name)
fields = base_fields
fields.extend(policy_attrs)
return fields
def _get_collection_kwargs(self, skipargs=None, **kwargs):
skipargs = skipargs or []
args_list = ['filters', 'fields', 'sorts', 'limit', 'marker',
'page_reverse']
args_dict = dict(
(arg, mock.ANY) for arg in set(args_list) - set(skipargs))
args_dict.update(kwargs)
return args_dict
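    # e.g. self._get_collection_kwargs(skipargs=['fields'], limit=10) ->
    # {'filters': mock.ANY, 'sorts': mock.ANY, 'marker': mock.ANY,
    #  'page_reverse': mock.ANY, 'limit': 10}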
def test_fields(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'fields': 'foo'})
fields = self._do_field_list('networks', ['foo'])
kwargs = self._get_collection_kwargs(fields=fields)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_fields_multiple(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
fields = self._do_field_list('networks', ['foo', 'bar'])
self.api.get(_get_path('networks'), {'fields': ['foo', 'bar']})
kwargs = self._get_collection_kwargs(fields=fields)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_fields_multiple_with_empty(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
fields = self._do_field_list('networks', ['foo'])
self.api.get(_get_path('networks'), {'fields': ['foo', '']})
kwargs = self._get_collection_kwargs(fields=fields)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_fields_empty(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'fields': ''})
kwargs = self._get_collection_kwargs(fields=[])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_fields_multiple_empty(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'fields': ['', '']})
kwargs = self._get_collection_kwargs(fields=[])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': 'bar'})
filters = {'name': ['bar']}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_empty(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': ''})
filters = {}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_multiple_empty(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': ['', '']})
filters = {}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_multiple_with_empty(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': ['bar', '']})
filters = {'name': ['bar']}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_multiple_values(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': ['bar', 'bar2']})
filters = {'name': ['bar', 'bar2']}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_multiple(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': 'bar',
'tenant_id': 'bar2'})
filters = {'name': ['bar'], 'tenant_id': ['bar2']}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_with_fields(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'name': 'bar', 'fields': 'foo'})
filters = {'name': ['bar']}
fields = self._do_field_list('networks', ['foo'])
kwargs = self._get_collection_kwargs(filters=filters, fields=fields)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_with_convert_to(self):
instance = self.plugin.return_value
instance.get_ports.return_value = []
self.api.get(_get_path('ports'), {'admin_state_up': 'true'})
filters = {'admin_state_up': [True]}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_ports.assert_called_once_with(mock.ANY, **kwargs)
def test_filters_with_convert_list_to(self):
instance = self.plugin.return_value
instance.get_ports.return_value = []
self.api.get(_get_path('ports'),
{'fixed_ips': ['ip_address=foo', 'subnet_id=bar']})
filters = {'fixed_ips': {'ip_address': ['foo'], 'subnet_id': ['bar']}}
kwargs = self._get_collection_kwargs(filters=filters)
instance.get_ports.assert_called_once_with(mock.ANY, **kwargs)
def test_limit(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'),
{'limit': '10'})
kwargs = self._get_collection_kwargs(limit=10)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_limit_with_great_than_max_limit(self):
cfg.CONF.set_default('pagination_max_limit', '1000')
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'),
{'limit': '1001'})
kwargs = self._get_collection_kwargs(limit=1000)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_limit_with_zero(self):
cfg.CONF.set_default('pagination_max_limit', '1000')
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'), {'limit': '0'})
kwargs = self._get_collection_kwargs(limit=1000)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_limit_with_unspecific(self):
cfg.CONF.set_default('pagination_max_limit', '1000')
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'))
kwargs = self._get_collection_kwargs(limit=1000)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_limit_with_negative_value(self):
cfg.CONF.set_default('pagination_max_limit', '1000')
instance = self.plugin.return_value
instance.get_networks.return_value = []
res = self.api.get(_get_path('networks'), {'limit': -1},
expect_errors=True)
self.assertEqual(exc.HTTPBadRequest.code, res.status_int)
def test_limit_with_non_integer(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
res = self.api.get(_get_path('networks'),
{'limit': 'abc'}, expect_errors=True)
self.assertEqual(exc.HTTPBadRequest.code, res.status_int)
def test_limit_with_infinite_pagination_max_limit(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
cfg.CONF.set_override('pagination_max_limit', 'Infinite')
self.api.get(_get_path('networks'))
kwargs = self._get_collection_kwargs(limit=None)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_limit_with_negative_pagination_max_limit(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
cfg.CONF.set_default('pagination_max_limit', '-1')
self.api.get(_get_path('networks'))
kwargs = self._get_collection_kwargs(limit=None)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_limit_with_non_integer_pagination_max_limit(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
cfg.CONF.set_default('pagination_max_limit', 'abc')
self.api.get(_get_path('networks'))
kwargs = self._get_collection_kwargs(limit=None)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_marker(self):
cfg.CONF.set_override('pagination_max_limit', '1000')
instance = self.plugin.return_value
instance.get_networks.return_value = []
marker = _uuid()
self.api.get(_get_path('networks'),
{'marker': marker})
kwargs = self._get_collection_kwargs(limit=1000, marker=marker)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_page_reverse(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'),
{'page_reverse': 'True'})
kwargs = self._get_collection_kwargs(page_reverse=True)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
instance.get_networks.reset_mock()
self.api.get(_get_path('networks'),
{'page_reverse': 'False'})
kwargs = self._get_collection_kwargs(page_reverse=False)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_page_reverse_with_non_bool(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'),
{'page_reverse': 'abc'})
kwargs = self._get_collection_kwargs(page_reverse=False)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_page_reverse_with_unspecific(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'))
kwargs = self._get_collection_kwargs(page_reverse=False)
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_sort(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'),
{'sort_key': ['name', 'admin_state_up'],
'sort_dir': ['desc', 'asc']})
kwargs = self._get_collection_kwargs(sorts=[('name', False),
('admin_state_up', True),
('id', True)])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_sort_with_primary_key(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
self.api.get(_get_path('networks'),
{'sort_key': ['name', 'admin_state_up', 'id'],
'sort_dir': ['desc', 'asc', 'desc']})
kwargs = self._get_collection_kwargs(sorts=[('name', False),
('admin_state_up', True),
('id', False)])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_sort_without_direction(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
res = self.api.get(_get_path('networks'), {'sort_key': ['name']},
expect_errors=True)
self.assertEqual(exc.HTTPBadRequest.code, res.status_int)
def test_sort_with_invalid_attribute(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
res = self.api.get(_get_path('networks'),
{'sort_key': 'abc',
'sort_dir': 'asc'},
expect_errors=True)
self.assertEqual(exc.HTTPBadRequest.code, res.status_int)
def test_sort_with_invalid_dirs(self):
instance = self.plugin.return_value
instance.get_networks.return_value = []
res = self.api.get(_get_path('networks'),
{'sort_key': 'name',
'sort_dir': 'abc'},
expect_errors=True)
self.assertEqual(exc.HTTPBadRequest.code, res.status_int)
def test_emulated_sort(self):
instance = self.plugin.return_value
instance._NeutronPluginBaseV2__native_pagination_support = False
instance._NeutronPluginBaseV2__native_sorting_support = False
instance.get_networks.return_value = []
api = webtest.TestApp(router.APIRouter())
api.get(_get_path('networks'), {'sort_key': ['name', 'status'],
'sort_dir': ['desc', 'asc']})
kwargs = self._get_collection_kwargs(
skipargs=['sorts', 'limit', 'marker', 'page_reverse'])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_emulated_sort_without_sort_field(self):
instance = self.plugin.return_value
instance._NeutronPluginBaseV2__native_pagination_support = False
instance._NeutronPluginBaseV2__native_sorting_support = False
instance.get_networks.return_value = []
api = webtest.TestApp(router.APIRouter())
api.get(_get_path('networks'), {'sort_key': ['name', 'status'],
'sort_dir': ['desc', 'asc'],
'fields': ['subnets']})
kwargs = self._get_collection_kwargs(
skipargs=['sorts', 'limit', 'marker', 'page_reverse'],
fields=_ArgMatcher(_list_cmp, ['name',
'status',
'id',
'subnets',
'shared',
'tenant_id']))
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_emulated_pagination(self):
instance = self.plugin.return_value
instance._NeutronPluginBaseV2__native_pagination_support = False
instance.get_networks.return_value = []
api = webtest.TestApp(router.APIRouter())
api.get(_get_path('networks'), {'limit': 10,
'marker': 'foo',
'page_reverse': False})
kwargs = self._get_collection_kwargs(skipargs=['limit',
'marker',
'page_reverse'])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
def test_native_pagination_without_native_sorting(self):
instance = self.plugin.return_value
instance._NeutronPluginBaseV2__native_sorting_support = False
self.assertRaises(n_exc.Invalid, router.APIRouter)
def test_native_pagination_without_allow_sorting(self):
cfg.CONF.set_override('allow_sorting', False)
instance = self.plugin.return_value
instance.get_networks.return_value = []
api = webtest.TestApp(router.APIRouter())
api.get(_get_path('networks'),
{'sort_key': ['name', 'admin_state_up'],
'sort_dir': ['desc', 'asc']})
kwargs = self._get_collection_kwargs(sorts=[('name', False),
('admin_state_up', True),
('id', True)])
instance.get_networks.assert_called_once_with(mock.ANY, **kwargs)
# Note: since all resources use the same controller and validation
# logic, we actually get really good coverage from testing just networks.
class JSONV2TestCase(APIv2TestBase, testlib_api.WebTestCase):
def _test_list(self, req_tenant_id, real_tenant_id):
env = {}
if req_tenant_id:
env = {'neutron.context': context.Context('', req_tenant_id)}
input_dict = {'id': uuidutils.generate_uuid(),
'name': 'net1',
'admin_state_up': True,
'status': "ACTIVE",
'tenant_id': real_tenant_id,
'shared': False,
'subnets': []}
return_value = [input_dict]
instance = self.plugin.return_value
instance.get_networks.return_value = return_value
res = self.api.get(_get_path('networks',
fmt=self.fmt), extra_environ=env)
res = self.deserialize(res)
self.assertIn('networks', res)
if not req_tenant_id or req_tenant_id == real_tenant_id:
# expect full list returned
self.assertEqual(1, len(res['networks']))
output_dict = res['networks'][0]
input_dict['shared'] = False
self.assertEqual(len(input_dict), len(output_dict))
for k, v in six.iteritems(input_dict):
self.assertEqual(v, output_dict[k])
else:
# expect no results
self.assertEqual(0, len(res['networks']))
def test_list_noauth(self):
self._test_list(None, _uuid())
def test_list_keystone(self):
tenant_id = _uuid()
self._test_list(tenant_id, tenant_id)
def test_list_keystone_bad(self):
tenant_id = _uuid()
self._test_list(tenant_id + "bad", tenant_id)
def test_list_pagination(self):
id1 = str(_uuid())
id2 = str(_uuid())
input_dict1 = {'id': id1,
'name': 'net1',
'admin_state_up': True,
'status': "ACTIVE",
'tenant_id': '',
'shared': False,
'subnets': []}
input_dict2 = {'id': id2,
'name': 'net2',
'admin_state_up': True,
'status': "ACTIVE",
'tenant_id': '',
'shared': False,
'subnets': []}
return_value = [input_dict1, input_dict2]
instance = self.plugin.return_value
instance.get_networks.return_value = return_value
params = {'limit': ['2'],
'marker': [str(_uuid())],
'sort_key': ['name'],
'sort_dir': ['asc']}
res = self.api.get(_get_path('networks'),
params=params).json
self.assertEqual(2, len(res['networks']))
self.assertEqual(sorted([id1, id2]),
sorted([res['networks'][0]['id'],
res['networks'][1]['id']]))
self.assertIn('networks_links', res)
next_links = []
previous_links = []
for r in res['networks_links']:
if r['rel'] == 'next':
next_links.append(r)
if r['rel'] == 'previous':
previous_links.append(r)
self.assertEqual(1, len(next_links))
self.assertEqual(1, len(previous_links))
url = urlparse.urlparse(next_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
params['marker'] = [id2]
self.assertEqual(params, urlparse.parse_qs(url.query))
url = urlparse.urlparse(previous_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
params['marker'] = [id1]
params['page_reverse'] = ['True']
self.assertEqual(params, urlparse.parse_qs(url.query))
def test_list_pagination_with_last_page(self):
id = str(_uuid())
input_dict = {'id': id,
'name': 'net1',
'admin_state_up': True,
'status': "ACTIVE",
'tenant_id': '',
'shared': False,
'subnets': []}
return_value = [input_dict]
instance = self.plugin.return_value
instance.get_networks.return_value = return_value
params = {'limit': ['2'],
'marker': str(_uuid())}
res = self.api.get(_get_path('networks'),
params=params).json
self.assertEqual(1, len(res['networks']))
self.assertEqual(id, res['networks'][0]['id'])
self.assertIn('networks_links', res)
previous_links = []
for r in res['networks_links']:
self.assertNotEqual(r['rel'], 'next')
if r['rel'] == 'previous':
previous_links.append(r)
self.assertEqual(1, len(previous_links))
url = urlparse.urlparse(previous_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
expect_params = params.copy()
expect_params['marker'] = [id]
expect_params['page_reverse'] = ['True']
self.assertEqual(expect_params, urlparse.parse_qs(url.query))
def test_list_pagination_with_empty_page(self):
return_value = []
instance = self.plugin.return_value
instance.get_networks.return_value = return_value
params = {'limit': ['2'],
'marker': str(_uuid())}
res = self.api.get(_get_path('networks'),
params=params).json
self.assertEqual([], res['networks'])
previous_links = []
if 'networks_links' in res:
for r in res['networks_links']:
self.assertNotEqual(r['rel'], 'next')
if r['rel'] == 'previous':
previous_links.append(r)
self.assertEqual(1, len(previous_links))
url = urlparse.urlparse(previous_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
expect_params = params.copy()
del expect_params['marker']
expect_params['page_reverse'] = ['True']
self.assertEqual(expect_params, urlparse.parse_qs(url.query))
def test_list_pagination_reverse_with_last_page(self):
id = str(_uuid())
input_dict = {'id': id,
'name': 'net1',
'admin_state_up': True,
'status': "ACTIVE",
'tenant_id': '',
'shared': False,
'subnets': []}
return_value = [input_dict]
instance = self.plugin.return_value
instance.get_networks.return_value = return_value
params = {'limit': ['2'],
'marker': [str(_uuid())],
'page_reverse': ['True']}
res = self.api.get(_get_path('networks'),
params=params).json
self.assertEqual(len(res['networks']), 1)
self.assertEqual(id, res['networks'][0]['id'])
self.assertIn('networks_links', res)
next_links = []
for r in res['networks_links']:
self.assertNotEqual(r['rel'], 'previous')
if r['rel'] == 'next':
next_links.append(r)
self.assertEqual(1, len(next_links))
url = urlparse.urlparse(next_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
expected_params = params.copy()
del expected_params['page_reverse']
expected_params['marker'] = [id]
self.assertEqual(expected_params,
urlparse.parse_qs(url.query))
def test_list_pagination_reverse_with_empty_page(self):
return_value = []
instance = self.plugin.return_value
instance.get_networks.return_value = return_value
params = {'limit': ['2'],
'marker': [str(_uuid())],
'page_reverse': ['True']}
res = self.api.get(_get_path('networks'),
params=params).json
self.assertEqual([], res['networks'])
next_links = []
if 'networks_links' in res:
for r in res['networks_links']:
self.assertNotEqual(r['rel'], 'previous')
if r['rel'] == 'next':
next_links.append(r)
self.assertEqual(1, len(next_links))
url = urlparse.urlparse(next_links[0]['href'])
self.assertEqual(url.path, _get_path('networks'))
expect_params = params.copy()
del expect_params['marker']
del expect_params['page_reverse']
self.assertEqual(expect_params, urlparse.parse_qs(url.query))
def test_create(self):
net_id = _uuid()
data = {'network': {'name': 'net1', 'admin_state_up': True,
'tenant_id': _uuid()}}
return_value = {'subnets': [], 'status': "ACTIVE",
'id': net_id}
return_value.update(data['network'].copy())
instance = self.plugin.return_value
instance.create_network.return_value = return_value
instance.get_networks_count.return_value = 0
res = self.api.post(_get_path('networks', fmt=self.fmt),
self.serialize(data),
content_type='application/' + self.fmt)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
res = self.deserialize(res)
self.assertIn('network', res)
net = res['network']
self.assertEqual(net_id, net['id'])
self.assertEqual("ACTIVE", net['status'])
def test_create_use_defaults(self):
net_id = _uuid()
initial_input = {'network': {'name': 'net1', 'tenant_id': _uuid()}}
full_input = {'network': {'admin_state_up': True,
'shared': False}}
full_input['network'].update(initial_input['network'])
return_value = {'id': net_id, 'status': "ACTIVE"}
return_value.update(full_input['network'])
instance = self.plugin.return_value
instance.create_network.return_value = return_value
instance.get_networks_count.return_value = 0
res = self.api.post(_get_path('networks', fmt=self.fmt),
self.serialize(initial_input),
content_type='application/' + self.fmt)
instance.create_network.assert_called_with(mock.ANY,
network=full_input)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
res = self.deserialize(res)
self.assertIn('network', res)
net = res['network']
self.assertEqual(net_id, net['id'])
self.assertTrue(net['admin_state_up'])
self.assertEqual("ACTIVE", net['status'])
def test_create_no_keystone_env(self):
data = {'name': 'net1'}
self._test_create_failure_bad_request('networks', data)
def test_create_with_keystone_env(self):
tenant_id = _uuid()
net_id = _uuid()
env = {'neutron.context': context.Context('', tenant_id)}
# tenant_id should be fetched from env
initial_input = {'network': {'name': 'net1'}}
full_input = {'network': {'admin_state_up': True,
'shared': False, 'tenant_id': tenant_id}}
full_input['network'].update(initial_input['network'])
return_value = {'id': net_id, 'status': "ACTIVE"}
return_value.update(full_input['network'])
instance = self.plugin.return_value
instance.create_network.return_value = return_value
instance.get_networks_count.return_value = 0
res = self.api.post(_get_path('networks', fmt=self.fmt),
self.serialize(initial_input),
content_type='application/' + self.fmt,
extra_environ=env)
instance.create_network.assert_called_with(mock.ANY,
network=full_input)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
def test_create_bad_keystone_tenant(self):
tenant_id = _uuid()
data = {'network': {'name': 'net1', 'tenant_id': tenant_id}}
env = {'neutron.context': context.Context('', tenant_id + "bad")}
self._test_create_failure_bad_request('networks', data,
extra_environ=env)
def test_create_no_body(self):
data = {'whoa': None}
self._test_create_failure_bad_request('networks', data)
def test_create_body_string_not_json(self):
data = 'a string'
self._test_create_failure_bad_request('networks', data)
def test_create_body_boolean_not_json(self):
data = True
self._test_create_failure_bad_request('networks', data)
def test_create_no_resource(self):
data = {}
self._test_create_failure_bad_request('networks', data)
def test_create_missing_attr(self):
data = {'port': {'what': 'who', 'tenant_id': _uuid()}}
self._test_create_failure_bad_request('ports', data)
def test_create_readonly_attr(self):
data = {'network': {'name': 'net1', 'tenant_id': _uuid(),
'status': "ACTIVE"}}
self._test_create_failure_bad_request('networks', data)
def test_create_with_too_long_name(self):
data = {'network': {'name': "12345678" * 32,
'admin_state_up': True,
'tenant_id': _uuid()}}
res = self.api.post(_get_path('networks', fmt=self.fmt),
self.serialize(data),
content_type='application/' + self.fmt,
expect_errors=True)
self.assertEqual(exc.HTTPBadRequest.code, res.status_int)
def test_create_bulk(self):
data = {'networks': [{'name': 'net1',
'admin_state_up': True,
'tenant_id': _uuid()},
{'name': 'net2',
'admin_state_up': True,
'tenant_id': _uuid()}]}
def side_effect(context, network):
net = network.copy()
net['network'].update({'subnets': []})
return net['network']
instance = self.plugin.return_value
instance.create_network.side_effect = side_effect
instance.get_networks_count.return_value = 0
res = self.api.post(_get_path('networks', fmt=self.fmt),
self.serialize(data),
content_type='application/' + self.fmt)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
def _test_create_failure_bad_request(self, resource, data, **kwargs):
res = self.api.post(_get_path(resource, fmt=self.fmt),
self.serialize(data),
content_type='application/' + self.fmt,
expect_errors=True, **kwargs)
self.assertEqual(exc.HTTPBadRequest.code, res.status_int)
def test_create_bulk_networks_none(self):
self._test_create_failure_bad_request('networks', {'networks': None})
def test_create_bulk_networks_empty_list(self):
self._test_create_failure_bad_request('networks', {'networks': []})
def test_create_bulk_missing_attr(self):
data = {'ports': [{'what': 'who', 'tenant_id': _uuid()}]}
self._test_create_failure_bad_request('ports', data)
def test_create_bulk_partial_body(self):
data = {'ports': [{'device_id': 'device_1',
'tenant_id': _uuid()},
{'tenant_id': _uuid()}]}
self._test_create_failure_bad_request('ports', data)
def test_create_attr_not_specified(self):
net_id = _uuid()
tenant_id = _uuid()
device_id = _uuid()
initial_input = {'port': {'name': '', 'network_id': net_id,
'tenant_id': tenant_id,
'device_id': device_id,
'admin_state_up': True}}
full_input = {'port': {'admin_state_up': True,
'mac_address': constants.ATTR_NOT_SPECIFIED,
'fixed_ips': constants.ATTR_NOT_SPECIFIED,
'device_owner': ''}}
full_input['port'].update(initial_input['port'])
return_value = {'id': _uuid(), 'status': 'ACTIVE',
'admin_state_up': True,
'mac_address': 'ca:fe:de:ad:be:ef',
'device_id': device_id,
'device_owner': ''}
return_value.update(initial_input['port'])
instance = self.plugin.return_value
instance.get_network.return_value = {
'tenant_id': six.text_type(tenant_id)
}
instance.get_ports_count.return_value = 1
instance.create_port.return_value = return_value
res = self.api.post(_get_path('ports', fmt=self.fmt),
self.serialize(initial_input),
content_type='application/' + self.fmt)
instance.create_port.assert_called_with(mock.ANY, port=full_input)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
res = self.deserialize(res)
self.assertIn('port', res)
port = res['port']
self.assertEqual(net_id, port['network_id'])
self.assertEqual('ca:fe:de:ad:be:ef', port['mac_address'])
def test_create_return_extra_attr(self):
net_id = _uuid()
data = {'network': {'name': 'net1', 'admin_state_up': True,
'tenant_id': _uuid()}}
return_value = {'subnets': [], 'status': "ACTIVE",
'id': net_id, 'v2attrs:something': "123"}
return_value.update(data['network'].copy())
instance = self.plugin.return_value
instance.create_network.return_value = return_value
instance.get_networks_count.return_value = 0
res = self.api.post(_get_path('networks', fmt=self.fmt),
self.serialize(data),
content_type='application/' + self.fmt)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
res = self.deserialize(res)
self.assertIn('network', res)
net = res['network']
self.assertEqual(net_id, net['id'])
self.assertEqual("ACTIVE", net['status'])
self.assertNotIn('v2attrs:something', net)
def test_fields(self):
return_value = {'name': 'net1', 'admin_state_up': True,
'subnets': []}
instance = self.plugin.return_value
instance.get_network.return_value = return_value
self.api.get(_get_path('networks',
id=uuidutils.generate_uuid(),
fmt=self.fmt))
def _test_delete(self, req_tenant_id, real_tenant_id, expected_code,
expect_errors=False):
env = {}
if req_tenant_id:
env = {'neutron.context': context.Context('', req_tenant_id)}
instance = self.plugin.return_value
instance.get_network.return_value = {'tenant_id': real_tenant_id,
'shared': False}
instance.delete_network.return_value = None
res = self.api.delete(_get_path('networks',
id=uuidutils.generate_uuid(),
fmt=self.fmt),
extra_environ=env,
expect_errors=expect_errors)
self.assertEqual(expected_code, res.status_int)
def test_delete_noauth(self):
self._test_delete(None, _uuid(), exc.HTTPNoContent.code)
def test_delete_keystone(self):
tenant_id = _uuid()
self._test_delete(tenant_id, tenant_id, exc.HTTPNoContent.code)
def test_delete_keystone_bad_tenant(self):
tenant_id = _uuid()
self._test_delete(tenant_id + "bad", tenant_id,
exc.HTTPNotFound.code, expect_errors=True)
def _test_get(self, req_tenant_id, real_tenant_id, expected_code,
expect_errors=False):
env = {}
shared = False
if req_tenant_id:
env = {'neutron.context': context.Context('', req_tenant_id)}
if req_tenant_id.endswith('another'):
shared = True
env['neutron.context'].roles = ['tenant_admin']
data = {'tenant_id': real_tenant_id, 'shared': shared}
instance = self.plugin.return_value
instance.get_network.return_value = data
res = self.api.get(_get_path('networks',
id=uuidutils.generate_uuid(),
fmt=self.fmt),
extra_environ=env,
expect_errors=expect_errors)
self.assertEqual(expected_code, res.status_int)
return res
def test_get_noauth(self):
self._test_get(None, _uuid(), 200)
def test_get_keystone(self):
tenant_id = _uuid()
self._test_get(tenant_id, tenant_id, 200)
def test_get_keystone_bad_tenant(self):
tenant_id = _uuid()
self._test_get(tenant_id + "bad", tenant_id,
exc.HTTPNotFound.code, expect_errors=True)
def test_get_keystone_shared_network(self):
tenant_id = _uuid()
self._test_get(tenant_id + "another", tenant_id, 200)
def test_get_keystone_strip_admin_only_attribute(self):
tenant_id = _uuid()
# Inject rule in policy engine
rules = oslo_policy.Rules.from_dict(
{'get_network:name': "rule:admin_only"})
policy.set_rules(rules, overwrite=False)
res = self._test_get(tenant_id, tenant_id, 200)
res = self.deserialize(res)
self.assertNotIn('name', res['network'])
def _test_update(self, req_tenant_id, real_tenant_id, expected_code,
expect_errors=False):
env = {}
if req_tenant_id:
env = {'neutron.context': context.Context('', req_tenant_id)}
# leave out 'name' field intentionally
data = {'network': {'admin_state_up': True}}
return_value = {'subnets': []}
return_value.update(data['network'].copy())
instance = self.plugin.return_value
instance.get_network.return_value = {'tenant_id': real_tenant_id,
'shared': False}
instance.update_network.return_value = return_value
res = self.api.put(_get_path('networks',
id=uuidutils.generate_uuid(),
fmt=self.fmt),
self.serialize(data),
extra_environ=env,
expect_errors=expect_errors)
# Ensure id attribute is included in fields returned by GET call
# in update procedure.
self.assertEqual(1, instance.get_network.call_count)
self.assertIn('id', instance.get_network.call_args[1]['fields'])
        self.assertEqual(expected_code, res.status_int)
def test_update_noauth(self):
self._test_update(None, _uuid(), 200)
def test_update_keystone(self):
tenant_id = _uuid()
self._test_update(tenant_id, tenant_id, 200)
def test_update_keystone_bad_tenant(self):
tenant_id = _uuid()
self._test_update(tenant_id + "bad", tenant_id,
exc.HTTPNotFound.code, expect_errors=True)
def test_update_keystone_no_tenant(self):
tenant_id = _uuid()
self._test_update(tenant_id, None,
exc.HTTPNotFound.code, expect_errors=True)
def test_update_readonly_field(self):
data = {'network': {'status': "NANANA"}}
res = self.api.put(_get_path('networks', id=_uuid()),
self.serialize(data),
content_type='application/' + self.fmt,
expect_errors=True)
self.assertEqual(400, res.status_int)
def test_invalid_attribute_field(self):
data = {'network': {'invalid_key1': "foo1", 'invalid_key2': "foo2"}}
res = self.api.put(_get_path('networks', id=_uuid()),
self.serialize(data),
content_type='application/' + self.fmt,
expect_errors=True)
self.assertEqual(400, res.status_int)
def test_retry_on_index(self):
instance = self.plugin.return_value
instance.get_networks.side_effect = [db_exc.RetryRequest(None), []]
api = webtest.TestApp(router.APIRouter())
api.get(_get_path('networks', fmt=self.fmt))
self.assertTrue(instance.get_networks.called)
def test_retry_on_show(self):
instance = self.plugin.return_value
instance.get_network.side_effect = [db_exc.RetryRequest(None), {}]
api = webtest.TestApp(router.APIRouter())
api.get(_get_path('networks', _uuid(), fmt=self.fmt))
self.assertTrue(instance.get_network.called)
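# db_exc.RetryRequest tells the API layer to re-invoke the plugin call, so a
# side_effect sequence of [RetryRequest(None), <result>] exercises exactly one
# retry before the request succeeds.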
class SubresourceTest(base.BaseTestCase):
def setUp(self):
super(SubresourceTest, self).setUp()
plugin = 'neutron.tests.unit.api.v2.test_base.TestSubresourcePlugin'
extensions.PluginAwareExtensionManager._instance = None
self.useFixture(tools.AttributeMapMemento())
self.config_parse()
self.setup_coreplugin(plugin)
self._plugin_patcher = mock.patch(plugin, autospec=True)
self.plugin = self._plugin_patcher.start()
api = router.APIRouter()
SUB_RESOURCES = {}
RESOURCE_ATTRIBUTE_MAP = {}
SUB_RESOURCES['dummy'] = {
'collection_name': 'dummies',
'parent': {'collection_name': 'networks',
'member_name': 'network'}
}
RESOURCE_ATTRIBUTE_MAP['dummies'] = {
'foo': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'default': '', 'is_visible': True},
'tenant_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True}
}
collection_name = SUB_RESOURCES['dummy'].get('collection_name')
resource_name = 'dummy'
parent = SUB_RESOURCES['dummy'].get('parent')
params = RESOURCE_ATTRIBUTE_MAP['dummies']
member_actions = {'mactions': 'GET'}
_plugin = manager.NeutronManager.get_plugin()
controller = v2_base.create_resource(collection_name, resource_name,
_plugin, params,
member_actions=member_actions,
parent=parent,
allow_bulk=True,
allow_pagination=True,
allow_sorting=True)
path_prefix = "/%s/{%s_id}/%s" % (parent['collection_name'],
parent['member_name'],
collection_name)
mapper_kwargs = dict(controller=controller,
path_prefix=path_prefix)
api.map.collection(collection_name, resource_name, **mapper_kwargs)
api.map.resource(collection_name, collection_name,
controller=controller,
parent_resource=parent,
member=member_actions)
self.api = webtest.TestApp(api)
def tearDown(self):
super(SubresourceTest, self).tearDown()
def test_index_sub_resource(self):
instance = self.plugin.return_value
self.api.get('/networks/id1/dummies')
instance.get_network_dummies.assert_called_once_with(mock.ANY,
filters=mock.ANY,
fields=mock.ANY,
network_id='id1')
def test_show_sub_resource(self):
instance = self.plugin.return_value
dummy_id = _uuid()
self.api.get('/networks/id1' + _get_path('dummies', id=dummy_id))
instance.get_network_dummy.assert_called_once_with(mock.ANY,
dummy_id,
network_id='id1',
fields=mock.ANY)
def test_create_sub_resource(self):
instance = self.plugin.return_value
body = {'dummy': {'foo': 'bar', 'tenant_id': _uuid()}}
self.api.post_json('/networks/id1/dummies', body)
instance.create_network_dummy.assert_called_once_with(mock.ANY,
network_id='id1',
dummy=body)
def test_update_sub_resource(self):
instance = self.plugin.return_value
dummy_id = _uuid()
body = {'dummy': {'foo': 'bar'}}
self.api.put_json('/networks/id1' + _get_path('dummies', id=dummy_id),
body)
instance.update_network_dummy.assert_called_once_with(mock.ANY,
dummy_id,
network_id='id1',
dummy=body)
def test_update_subresource_to_none(self):
instance = self.plugin.return_value
dummy_id = _uuid()
body = {'dummy': {}}
self.api.put_json('/networks/id1' + _get_path('dummies', id=dummy_id),
body)
instance.update_network_dummy.assert_called_once_with(mock.ANY,
dummy_id,
network_id='id1',
dummy=body)
def test_delete_sub_resource(self):
instance = self.plugin.return_value
dummy_id = _uuid()
self.api.delete('/networks/id1' + _get_path('dummies', id=dummy_id))
instance.delete_network_dummy.assert_called_once_with(mock.ANY,
dummy_id,
network_id='id1')
def test_sub_resource_member_actions(self):
instance = self.plugin.return_value
dummy_id = _uuid()
self.api.get('/networks/id1' + _get_path('dummies', id=dummy_id,
action='mactions'))
instance.mactions.assert_called_once_with(mock.ANY,
dummy_id,
network_id='id1')
# Note: since all resources use the same controller and validation
# logic, we actually get really good coverage from testing just networks.
class V2Views(base.BaseTestCase):
def _view(self, keys, collection, resource):
data = dict((key, 'value') for key in keys)
data['fake'] = 'value'
attr_info = attributes.RESOURCE_ATTRIBUTE_MAP[collection]
controller = v2_base.Controller(None, collection, resource, attr_info)
res = controller._view(context.get_admin_context(), data)
self.assertNotIn('fake', res)
for key in keys:
self.assertIn(key, res)
def test_network(self):
keys = ('id', 'name', 'subnets', 'admin_state_up', 'status',
'tenant_id')
self._view(keys, 'networks', 'network')
def test_port(self):
keys = ('id', 'network_id', 'mac_address', 'fixed_ips',
'device_id', 'admin_state_up', 'tenant_id', 'status')
self._view(keys, 'ports', 'port')
def test_subnet(self):
keys = ('id', 'network_id', 'tenant_id', 'gateway_ip',
'ip_version', 'cidr', 'enable_dhcp')
self._view(keys, 'subnets', 'subnet')
class NotificationTest(APIv2TestBase):
def setUp(self):
super(NotificationTest, self).setUp()
fake_notifier.reset()
def _resource_op_notifier(self, opname, resource, expected_errors=False):
initial_input = {resource: {'name': 'myname'}}
instance = self.plugin.return_value
instance.get_networks.return_value = initial_input
instance.get_networks_count.return_value = 0
expected_code = exc.HTTPCreated.code
if opname == 'create':
initial_input[resource]['tenant_id'] = _uuid()
res = self.api.post_json(
_get_path('networks'),
initial_input, expect_errors=expected_errors)
if opname == 'update':
res = self.api.put_json(
_get_path('networks', id=_uuid()),
initial_input, expect_errors=expected_errors)
expected_code = exc.HTTPOk.code
if opname == 'delete':
initial_input[resource]['tenant_id'] = _uuid()
res = self.api.delete(
_get_path('networks', id=_uuid()),
expect_errors=expected_errors)
expected_code = exc.HTTPNoContent.code
expected_events = ('.'.join([resource, opname, "start"]),
'.'.join([resource, opname, "end"]))
self.assertEqual(len(expected_events),
len(fake_notifier.NOTIFICATIONS))
for msg, event in zip(fake_notifier.NOTIFICATIONS, expected_events):
self.assertEqual('INFO', msg['priority'])
self.assertEqual(event, msg['event_type'])
if opname == 'delete' and event == 'network.delete.end':
self.assertIn('payload', msg)
resource = msg['payload']
self.assertIn('network_id', resource)
self.assertIn('network', resource)
self.assertEqual(expected_code, res.status_int)
    def test_network_create_notifier(self):
        self._resource_op_notifier('create', 'network')
    def test_network_delete_notifier(self):
        self._resource_op_notifier('delete', 'network')
    def test_network_update_notifier(self):
        self._resource_op_notifier('update', 'network')
class RegistryNotificationTest(APIv2TestBase):
def setUp(self):
# This test does not have database support so tracking cannot be used
cfg.CONF.set_override('track_quota_usage', False, group='QUOTAS')
super(RegistryNotificationTest, self).setUp()
def _test_registry_notify(self, opname, resource, initial_input=None):
instance = self.plugin.return_value
instance.get_networks.return_value = initial_input
instance.get_networks_count.return_value = 0
expected_code = exc.HTTPCreated.code
with mock.patch.object(registry, 'notify') as notify:
if opname == 'create':
res = self.api.post_json(
_get_path('networks'),
initial_input)
if opname == 'update':
res = self.api.put_json(
_get_path('networks', id=_uuid()),
initial_input)
expected_code = exc.HTTPOk.code
if opname == 'delete':
res = self.api.delete(_get_path('networks', id=_uuid()))
expected_code = exc.HTTPNoContent.code
self.assertTrue(notify.called)
self.assertEqual(expected_code, res.status_int)
def test_network_create_registry_notify(self):
input = {'network': {'name': 'net',
'tenant_id': _uuid()}}
self._test_registry_notify('create', 'network', input)
def test_network_delete_registry_notify(self):
self._test_registry_notify('delete', 'network')
def test_network_update_registry_notify(self):
input = {'network': {'name': 'net'}}
self._test_registry_notify('update', 'network', input)
def test_networks_create_bulk_registry_notify(self):
input = {'networks': [{'name': 'net1',
'tenant_id': _uuid()},
{'name': 'net2',
'tenant_id': _uuid()}]}
self._test_registry_notify('create', 'network', input)
class QuotaTest(APIv2TestBase):
def setUp(self):
# This test does not have database support so tracking cannot be used
cfg.CONF.set_override('track_quota_usage', False, group='QUOTAS')
super(QuotaTest, self).setUp()
        # Use mock to let the API use a different QuotaEngine instance for
        # unit tests in this class. This ensures resources are registered
        # again and instantiated with neutron.quota.resource.CountableResource
replacement_registry = resource_registry.ResourceRegistry()
registry_patcher = mock.patch('neutron.quota.resource_registry.'
'ResourceRegistry.get_instance')
mock_registry = registry_patcher.start().return_value
mock_registry.get_resource = replacement_registry.get_resource
mock_registry.resources = replacement_registry.resources
# Register a resource
replacement_registry.register_resource_by_name('network')
def test_create_network_quota(self):
cfg.CONF.set_override('quota_network', 1, group='QUOTAS')
initial_input = {'network': {'name': 'net1', 'tenant_id': _uuid()}}
full_input = {'network': {'admin_state_up': True, 'subnets': []}}
full_input['network'].update(initial_input['network'])
instance = self.plugin.return_value
instance.get_networks_count.return_value = 1
res = self.api.post_json(
_get_path('networks'), initial_input, expect_errors=True)
instance.get_networks_count.assert_called_with(mock.ANY,
filters=mock.ANY)
self.assertIn("Quota exceeded for resources",
res.json['NeutronError']['message'])
def test_create_network_quota_no_counts(self):
cfg.CONF.set_override('quota_network', 1, group='QUOTAS')
initial_input = {'network': {'name': 'net1', 'tenant_id': _uuid()}}
full_input = {'network': {'admin_state_up': True, 'subnets': []}}
full_input['network'].update(initial_input['network'])
instance = self.plugin.return_value
instance.get_networks_count.side_effect = (
NotImplementedError())
instance.get_networks.return_value = ["foo"]
res = self.api.post_json(
_get_path('networks'), initial_input, expect_errors=True)
instance.get_networks_count.assert_called_with(mock.ANY,
filters=mock.ANY)
self.assertIn("Quota exceeded for resources",
res.json['NeutronError']['message'])
def test_create_network_quota_without_limit(self):
cfg.CONF.set_override('quota_network', -1, group='QUOTAS')
initial_input = {'network': {'name': 'net1', 'tenant_id': _uuid()}}
instance = self.plugin.return_value
instance.get_networks_count.return_value = 3
res = self.api.post_json(
_get_path('networks'), initial_input)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
class ExtensionTestCase(base.BaseTestCase):
def setUp(self):
# This test does not have database support so tracking cannot be used
cfg.CONF.set_override('track_quota_usage', False, group='QUOTAS')
super(ExtensionTestCase, self).setUp()
plugin = 'neutron.neutron_plugin_base_v2.NeutronPluginBaseV2'
# Ensure existing ExtensionManager is not used
extensions.PluginAwareExtensionManager._instance = None
self.useFixture(tools.AttributeMapMemento())
# Create the default configurations
self.config_parse()
# Update the plugin and extensions path
self.setup_coreplugin(plugin)
cfg.CONF.set_override('api_extensions_path', EXTDIR)
self._plugin_patcher = mock.patch(plugin, autospec=True)
self.plugin = self._plugin_patcher.start()
# Instantiate mock plugin and enable the V2attributes extension
manager.NeutronManager.get_plugin().supported_extension_aliases = (
["v2attrs"])
api = router.APIRouter()
self.api = webtest.TestApp(api)
quota.QUOTAS._driver = None
cfg.CONF.set_override('quota_driver', 'neutron.quota.ConfDriver',
group='QUOTAS')
def tearDown(self):
super(ExtensionTestCase, self).tearDown()
self.api = None
self.plugin = None
def test_extended_create(self):
net_id = _uuid()
initial_input = {'network': {'name': 'net1', 'tenant_id': _uuid(),
'v2attrs:something_else': "abc"}}
data = {'network': {'admin_state_up': True, 'shared': False}}
data['network'].update(initial_input['network'])
return_value = {'subnets': [], 'status': "ACTIVE",
'id': net_id,
'v2attrs:something': "123"}
return_value.update(data['network'].copy())
instance = self.plugin.return_value
instance.create_network.return_value = return_value
instance.get_networks_count.return_value = 0
res = self.api.post_json(_get_path('networks'), initial_input)
instance.create_network.assert_called_with(mock.ANY,
network=data)
self.assertEqual(exc.HTTPCreated.code, res.status_int)
self.assertIn('network', res.json)
net = res.json['network']
self.assertEqual(net_id, net['id'])
self.assertEqual("ACTIVE", net['status'])
self.assertEqual("123", net['v2attrs:something'])
self.assertNotIn('v2attrs:something_else', net)
class TestSubresourcePlugin(object):
def get_network_dummies(self, context, network_id,
filters=None, fields=None):
return []
def get_network_dummy(self, context, id, network_id,
fields=None):
return {}
def create_network_dummy(self, context, network_id, dummy):
return {}
def update_network_dummy(self, context, id, network_id, dummy):
return {}
def delete_network_dummy(self, context, id, network_id):
return
def mactions(self, context, id, network_id):
return
class ListArgsTestCase(base.BaseTestCase):
def test_list_args(self):
path = '/?fields=4&foo=3&fields=2&bar=1'
request = webob.Request.blank(path)
expect_val = ['2', '4']
actual_val = api_common.list_args(request, 'fields')
self.assertEqual(expect_val, sorted(actual_val))
def test_list_args_with_empty(self):
path = '/?foo=4&bar=3&baz=2&qux=1'
request = webob.Request.blank(path)
self.assertEqual([], api_common.list_args(request, 'fields'))
class FiltersTestCase(base.BaseTestCase):
def test_all_skip_args(self):
path = '/?fields=4&fields=3&fields=2&fields=1'
request = webob.Request.blank(path)
self.assertEqual({}, api_common.get_filters(request, None,
["fields"]))
def test_blank_values(self):
path = '/?foo=&bar=&baz=&qux='
request = webob.Request.blank(path)
self.assertEqual({}, api_common.get_filters(request, {}))
def test_no_attr_info(self):
path = '/?foo=4&bar=3&baz=2&qux=1'
request = webob.Request.blank(path)
expect_val = {'foo': ['4'], 'bar': ['3'], 'baz': ['2'], 'qux': ['1']}
actual_val = api_common.get_filters(request, {})
self.assertEqual(expect_val, actual_val)
def test_attr_info_without_conversion(self):
path = '/?foo=4&bar=3&baz=2&qux=1'
request = webob.Request.blank(path)
attr_info = {'foo': {'key': 'val'}}
expect_val = {'foo': ['4'], 'bar': ['3'], 'baz': ['2'], 'qux': ['1']}
actual_val = api_common.get_filters(request, attr_info)
self.assertEqual(expect_val, actual_val)
def test_attr_info_with_convert_list_to(self):
path = '/?foo=key=4&bar=3&foo=key=2&qux=1'
request = webob.Request.blank(path)
attr_info = {
'foo': {
'convert_list_to': converters.convert_kvp_list_to_dict,
}
}
expect_val = {'foo': {'key': ['2', '4']}, 'bar': ['3'], 'qux': ['1']}
actual_val = api_common.get_filters(request, attr_info)
self.assertOrderedEqual(expect_val, actual_val)
def test_attr_info_with_convert_to(self):
path = '/?foo=4&bar=3&baz=2&qux=1'
request = webob.Request.blank(path)
attr_info = {'foo': {'convert_to': converters.convert_to_int}}
expect_val = {'foo': [4], 'bar': ['3'], 'baz': ['2'], 'qux': ['1']}
actual_val = api_common.get_filters(request, attr_info)
self.assertEqual(expect_val, actual_val)
class CreateResourceTestCase(base.BaseTestCase):
def test_resource_creation(self):
resource = v2_base.create_resource('fakes', 'fake', None, {})
self.assertIsInstance(resource, webob.dec.wsgify)
| apache-2.0 | 4,352,241,565,383,894,000 | 40.778465 | 79 | 0.556151 | false |
timothycrosley/thedom | thedom/document.py | 1 | 6794 | '''
Document.py
Provides elements that define the html document being served to the client-side
Copyright (C) 2015 Timothy Edmund Crosley
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
from . import Base, Factory
from .MethodUtils import CallBack
from .MultiplePythonSupport import *
from .Resources import ResourceFile
Factory = Factory.Factory("Document")
DOCTYPE_XHTML_TRANSITIONAL = ('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" '
'"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">')
DOCTYPE_XHTML_STRICT = ('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" '
'"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">')
DOCTYPE_XHTML_FRAMESET = ('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" '
'"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd">')
DOCTYPE_HTML4_TRANSITIONAL = ('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN" '
'"http://www.w3.org/TR/REC-html40/loose.dtd">')
DOCTYPE_HTML4_STRICT = ('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"'
'"http://www.w3.org/TR/html4/strict.dtd">')
DOCTYPE_HTML4_FRAMESET = ('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" '
'"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd">')
DOCTYPE_HTML5 = "<!DOCTYPE html>"
class MetaData(Base.Node):
"""
A webelement implementation of the meta tag
"""
__slots__ = ()
tagName = "meta"
displayable = False
properties = Base.Node.properties.copy()
properties['value'] = {'action':'setValue'}
properties['name'] = {'action':'setName'}
properties['http-equiv'] = {'action':'attribute'}
def _create(self, id=None, name=None, parent=None, **kwargs):
Base.Node._create(self)
def value(self):
"""
        Returns the meta tag's value
"""
return self.attributes.get('content')
def setValue(self, value):
"""
        Sets the meta tag's value
"""
self.attributes['content'] = value
def getName(self):
"""
Returns the name of the meta tag
"""
return self.name
def setName(self, name):
"""
Sets the name of the meta tag
"""
self.name = name
def shown(self):
"""
Meta tags are never visible
"""
return False
Factory.addProduct(MetaData)
class HTTPHeader(MetaData):
"""
A webelement that represents an http header meta tag
"""
__slots__ = ()
def getName(self):
"""
        Returns the header's name
"""
return self.attributes.get('http-equiv')
def setName(self, name):
"""
        Sets the header's name
"""
self.attributes['http-equiv'] = name
Factory.addProduct(HTTPHeader)
class Document(Base.Node):
"""
A Node representation of the overall document that fills a single page
"""
__slots__ = ('head', 'body', 'title', 'contentType')
doctype = DOCTYPE_HTML5
tagName = "html"
properties = Base.Node.properties.copy()
properties['doctype'] = {'action':'classAttribute'}
properties['title'] = {'action':'title.setText'}
properties['contentType'] = {'action':'contentType.setValue'}
properties['xmlns'] = {'action':'attribute'}
class Head(Base.Node):
"""
Documents Head
"""
tagName = "head"
class Body(Base.Node):
"""
Documents Body
"""
tagName = "body"
class Title(Base.Node):
"""
Documents Title
"""
tagName = "title"
def _create(self, id=None, name=None, parent=None, **kwargs):
Base.Node._create(self, id=id, name=name, parent=parent)
self._textNode = self.add(Base.TextNode())
def setText(self, text):
"""
Sets the document title
"""
self._textNode.setText(text)
def text(self):
"""
Returns the document title
"""
            return self._textNode.text()
def _create(self, id=None, name=None, parent=None, **kwargs):
Base.Node._create(self)
self.head = self.add(self.Head())
self.body = self.add(self.Body())
self.title = self.head.add(self.Title())
self.contentType = self.addHeader('Content-Type', 'text/html; charset=UTF-8')
def addMetaData(self, name=None, value="", **kwargs):
"""
Will add a meta tag based on name+value pair
"""
metaTag = self.head.add(MetaData(**kwargs))
metaTag.setName(name)
metaTag.setValue(value)
return metaTag
def addHeader(self, name, value):
"""
Will add an HTTP header pair based on name + value pair
"""
header = self.head.add(HTTPHeader())
header.setName(name)
header.setValue(value)
return header
def toHTML(self, formatted=False, *args, **kwargs):
"""
Overrides toHTML to include the doctype definition before the open tag.
"""
return self.doctype + "\n" + Base.Node.toHTML(self, formatted, *args, **kwargs)
def add(self, childElement, ensureUnique=True):
"""
Overrides add to place header elements and resources in the head
and all others in the body.
"""
if type(childElement) in [self.Head, self.Body]:
return Base.Node.add(self, childElement, ensureUnique)
elif type(childElement) == ResourceFile or childElement._tagName in ['title', 'base', 'link',
'meta', 'script', 'style']:
return self.head.add(childElement, ensureUnique)
else:
return self.body.add(childElement, ensureUnique)
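# Minimal usage sketch (illustrative, not part of the original module;
# 'SomeBodyElement' stands in for any thedom element and is an assumption):
#
#   doc = Document()
#   doc.title.setText('Hello')
#   doc.addMetaData(name='description', value='demo page')
#   doc.add(SomeBodyElement())   # non-head elements land in doc.body
#   html = doc.toHTML(formatted=True)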
Head = Document.Head
Body = Document.Body
Title = Document.Title
Factory.addProduct(Document)
| gpl-2.0 | 7,022,688,719,562,299,000 | 31.507177 | 104 | 0.584781 | false |
cosmodesi/snsurvey | src/control.py | 1 | 1120 | #!/usr/bin/env python
import numpy
import sncosmo
import scipy.optimize
import matplotlib.pyplot as plt
model=sncosmo.Model(source='salt2-extended')
def f(t, rlim):
    # print(t, model.bandflux('desr', t, zp=rlim, zpsys='ab'))
    return model.bandflux('desr', t, zp=rlim, zpsys='ab') - 1.
def controlTime(z,rlim):
model.set(z=z, t0=55000.)
model.set_source_peakabsmag(absmag=-19.3,band='bessellb',magsys='ab')
    pre = scipy.optimize.fsolve(f, 55000. - 15 * (1 + z), args=(rlim,), xtol=1e-8)
    post = scipy.optimize.fsolve(f, 55000. + 20 * (1 + z), args=(rlim,), xtol=1e-8)
return max(post[0]-pre[0],0)
# print scipy.optimize.fsolve(f, 55000.+40,args=(rlim),factor=1.,xtol=1e-8)
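# Illustrative usage (values assumed; requires the salt2-extended source files
# that sncosmo fetches on first use):
#
#   ct = controlTime(z=0.1, rlim=21.0)   # control time in days at z=0.1, r_lim=21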
def plot():
lmag = numpy.arange(19.5,21.6,0.5)
zs = numpy.arange(0.02, 0.2501,0.02)
ans = []
for lm in lmag:
ans_=[]
for z in zs:
ans_.append(controlTime(z,lm))
ans.append(ans_)
for lm, ct in zip(lmag, ans):
plt.plot(zs, ct, label = '$r_{{lim}} = {}$'.format(str(lm)))
plt.xlabel(r'$z$')
plt.ylabel(r'control time (days)')
plt.legend()
plt.show()
| bsd-3-clause | -1,581,675,154,253,889,500 | 26.317073 | 79 | 0.605357 | false |
hirunatan/estelcon_web | activities/services.py | 1 | 11340 | from django.core.mail import send_mail, mail_managers
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.models import User
from django.db.models import Count
from datetime import datetime, timedelta
from collections import namedtuple
import locale
import math
from .models import Activity
from functools import reduce
Day = namedtuple('Day', ['name', 'blocks'])
Block = namedtuple('Block', ['hour', 'columns'])
Column = namedtuple('Column', ['rowspan', 'colspan', 'activities'])
PendingColumn = namedtuple('PendingColumn', ['current_row', 'column'])
def get_schedule():
# Obtain the list of all activities (they are already ordered by start date) and put them in
# a table divided in days, and then in blocks of half hour, from 8:30h to 05:00h next day.
# Each block contains columns, and in each column fit one or more activities. Columns
# may also span more than one block.
# Set the language for day names
locale.setlocale(locale.LC_ALL, 'es_ES.UTF-8')
# Get the complete list of activities, and split into those with hour and those without
activities = Activity.objects.all()
activ_without_hour = [a for a in activities if a.start is None]
activ_with_hour = [a for a in activities if a.start is not None]
# Create the list of days
days = []
if len(activ_with_hour) > 0:
first_day = activ_with_hour[0].start.replace(hour=0, minute=0, second=0, microsecond=0)
last_day = activ_with_hour[-1].start.replace(hour=0, minute=0, second=0, microsecond=0)
day = first_day
while day <= last_day:
day_blocks = _build_day_blocks(activ_with_hour, day)
days.append(day_blocks)
day = day + timedelta(days=1)
return (activ_without_hour, days)
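# Illustrative shape of the value returned by get_schedule() (example data
# assumed, not taken from a real database):
#
#   ([<Activity without hour>, ...],
#    [Day(name='SÁBADO 12',
#         blocks=[Block(hour='10:00',
#                       columns=[Column(rowspan=2, colspan=2,
#                                       activities=[<Activity>, ...])])])])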
def _build_day_blocks(activ_with_hour, day):
first_block_hour = day.replace(hour=8, minute=00) # from 08:30h
last_block_hour = first_block_hour + timedelta(hours=20, minutes=30) # until 05:00h next day
pending_cols = [
PendingColumn(0, Column(1, 2, [])),
PendingColumn(0, Column(1, 1, [])),
PendingColumn(0, Column(1, 1, []))
]
# Create a list of 30min blocks
blocks = []
block_hour = first_block_hour
while block_hour <= last_block_hour:
block = _build_block(activ_with_hour, block_hour, pending_cols)
if block:
blocks.append(block)
block_hour = block_hour + timedelta(minutes=30)
# Remove all empty blocks at the beginning and the end of the day
for i in [0, -1]:
while len(blocks) > 0:
block = blocks[i]
if not block.columns:
del blocks[i]
else:
break
return Day(day.strftime('%A %d').upper(), blocks)
def _build_block(activ_with_hour, block_hour, pending_cols):
for ncol in range(3):
rowspan, activities = _get_block_activities(activ_with_hour, block_hour, ncol)
current_row, column = pending_cols[ncol]
column.activities.extend(activities)
if rowspan > column.rowspan - current_row:
column = Column(rowspan + current_row, column.colspan, column.activities)
pending_cols[ncol] = PendingColumn(current_row, column)
if pending_cols[0].column.activities:
if pending_cols[0].current_row == 0:
columns = [pending_cols[0].column]
else:
columns = []
if pending_cols[1].column.activities and columns:
columns[0].activities.extend(pending_cols[1].column.activities)
if pending_cols[2].column.activities and columns:
columns[0].activities.extend(pending_cols[2].column.activities)
else:
columns = []
if pending_cols[1].current_row == 0 and pending_cols[1].column.activities:
columns.append(pending_cols[1].column)
if pending_cols[2].current_row == 0 and pending_cols[2].column.activities:
columns.append(pending_cols[2].column)
for ncol in range(3):
current_row, column = pending_cols[ncol]
current_row += 1
if current_row >= column.rowspan:
current_row = 0
column = Column(1, column.colspan, [])
pending_cols[ncol] = PendingColumn(current_row, column)
return Block(block_hour.strftime('%H:%M'), columns)
def _get_block_activities(activ_with_hour, block_hour, ncol):
activities = []
rowspan = 1
for activity in activ_with_hour:
if (activity.start >= block_hour) and \
(activity.start < (block_hour + timedelta(minutes=30))) and \
(activity.column == ncol):
activities.append(activity)
if activity.end is None:
duration = 0
else:
duration = math.ceil((activity.end - activity.start).seconds / 60)
activ_span = math.ceil(duration / 30)
if activ_span > rowspan:
rowspan = activ_span
return (rowspan, activities)
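# Example of the rowspan computation above: an activity lasting 90 minutes
# spans ceil(90 / 30) = 3 half-hour blocks, so its column reserves rowspan=3
# in the schedule table.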
def get_activity_and_status(activity_id, user):
try:
activity = Activity.objects.get(pk = activity_id)
except Activity.DoesNotExist:
return (None, {})
is_owner = False
is_organizer = False
is_participant = False
is_admin = False
if user.is_authenticated():
if user in activity.owners.all():
is_owner = True
if user in activity.organizers.all():
is_organizer = True
if user in activity.participants.all():
is_participant = True
if user.is_staff:
is_admin = True
user_status = {
'is_owner': is_owner,
'is_organizer': is_organizer,
'is_participant': is_participant,
'is_admin': is_admin
}
return (activity, user_status)
def subscribe_to_activity(user, activity_id):
#TODO: refactor to receive an actual activity object instead of an id
try:
activity = Activity.objects.get(pk = activity_id)
except Activity.DoesNotExist:
return
# User is always added, even if the limit is reached
activity.participants.add(user)
activity.save()
# Subscription limit control
maxplacesreached = False
if len(activity.participants.all()) > activity.max_places:
maxplacesreached = True
mail_managers(
subject = '[Estelcon Admin] Inscripción en actividad %s' % (activity.title),
message =
'''
El usuario %s (%s) se ha inscrito en la actividad %s.
'''
% (user.username, user.get_full_name(), activity.title),
)
for owner in activity.owners.all():
send_mail(
subject = '[Estelcon] Inscripción en actividad de la Estelcon que tú organizas',
message =
'''
El usuario %s (%s, %s) se ha inscrito en la actividad %s.
'''
% (user.username, user.get_full_name(), user.email, activity.title),
from_email = settings.MAIL_FROM,
recipient_list = [owner.email],
fail_silently = False
)
if maxplacesreached:
send_mail(
subject = '[Estelcon] ATENCION: Tu actividad ha superado el máximo de plazas.',
message =
'''
Ponte en contacto con la organización, por favor, ya que tu actividad '%s' ya ha sobrepasado el máximo de plazas.
Actualmente tienes %d inscritos en una actividad con un máximo establecido por ti de %d.
'''
% (activity.title, len(activity.participants.all()), activity.max_places),
from_email = settings.MAIL_FROM,
recipient_list = [owner.email],
fail_silently = False
)
if maxplacesreached:
message_participants_maxplaces = \
'''
ATENCION, tu inscripción ha superado el número máximo de plazas disponibles. Los responsables
ya han sido notificados de este hecho y tomarán una decisión en breve. Si no recibes
contestación en pocos días no dudes en escribir directamente a la organización.
'''
else:
message_participants_maxplaces = 'Te encuentras dentro del número máximo de plazas.'
send_mail(
subject = '[Estelcon] Inscripción en actividad de la Estelcon',
message =
'''
Se ha registrado tu inscripción en la actividad con título '%s'.
Si en el futuro deseas cancelarla, escribe a la organización.
%s
'''
% (activity.title, message_participants_maxplaces),
from_email = settings.MAIL_FROM,
recipient_list = [user.email],
fail_silently = True
)
def change_activity(user, activity, home_url):
mail_managers(
subject = '[Estelcon Admin] Modificación de actividad "%s"' % (activity.title),
message =
'''
El usuario %s (%s) ha modificado una actividad
Título: %s
Subtítulo: %s
Duración: %s
Nº máximo de plazas: %d
Mostrar responsables: %s
Texto:
%s
Necesidades logísticas:
%s
Notas para la organización:
%s'''
% (
user.username, user.get_full_name(), activity.title, activity.subtitle,
activity.duration, activity.max_places or 0, activity.show_owners,
activity.text, activity.logistics, activity.notes_organization),
)
send_mail(
subject = '[Estelcon] Se ha modificado la actividad "%s"' % (activity.title),
message =
'''
Se ha modificado correctamente la actividad con título '%s'.
¡Muchas gracias por participar! Entre todos haremos una gran Mereth Aderthad.
El equipo organizador.
%s
'''
% (activity.title, home_url),
from_email = settings.MAIL_FROM,
recipient_list = [user.email],
fail_silently = True
)
def send_proposal(user, data, home_url):
mail_managers(
subject = '[Estelcon Admin] Actividad propuesta: %s' % (data['title']),
message =
'''
El usuario %s (%s) ha propuesto una actividad.
Título: %s
Subtítulo: %s
Duración: %s
Nº máximo de plazas: %d
Mostrar responsables: %s
Requiere inscripción: %s
Responsables:
%s
Organizadores:
%s
Texto:
%s
Necesidades logísticas:
%s
Notas para la organización:
%s'''
% (
user.username, user.get_full_name(), data['title'], data['subtitle'],
data['duration'], data['max_places'] or 0, data['show_owners'],
data['requires_inscription'], data['owners'], data['organizers'],
data['text'], data['logistics'], data['notes_organization']),
)
send_mail(
subject = '[Estelcon] Actividad propuesta para la Estelcon',
message =
'''
Se ha enviado a los organizadores tu propuesta de actividad con título
'%s'.
Estudiaremos la actividad que propones y le buscaremos un hueco en la Estelcon. En cuanto
lo hagamos, podrás ver cómo aparece en el Programa de actividades, incluyendo una ficha
rellena con los datos que nos has enviado (al menos con la parte pública). Y si tú o
cualquiera de las personas designadas como responsables accedéis a la web con vuestro
usuario y contraseña, podréis consultar y modificar todos los datos.
Si tenemos alguna duda o consulta que hacerte, contactaremos contigo a través del correo
electrónico o el teléfono que indicaste al registrarte.
¡Muchas gracias por participar! Entre todos haremos una gran Mereth Aderthad.
El equipo organizador.
%s
'''
% (data['title'], home_url),
from_email = settings.MAIL_FROM,
recipient_list = [user.email],
fail_silently = True
)
| agpl-3.0 | -6,948,931,239,060,189,000 | 29.928767 | 113 | 0.647002 | false |
Titan-C/scikit-learn | examples/cluster/plot_ward_structured_vs_unstructured.py | 1 | 3369 | """
===========================================================
Hierarchical clustering: structured vs unstructured ward
===========================================================
Example builds a swiss roll dataset and runs
hierarchical clustering on their position.
For more information, see :ref:`hierarchical_clustering`.
In a first step, the hierarchical clustering is performed without connectivity
constraints on the structure and is solely based on distance, whereas in
a second step the clustering is restricted to the k-Nearest Neighbors
graph: it's a hierarchical clustering with structure prior.
Some of the clusters learned without connectivity constraints do not
respect the structure of the swiss roll and extend across different folds of
the manifolds. On the opposite, when opposing connectivity constraints,
the clusters form a nice parcellation of the swiss roll.
"""
# Authors : Vincent Michel, 2010
# Alexandre Gramfort, 2010
# Gael Varoquaux, 2010
# License: BSD 3 clause
print(__doc__)
import time as time
import numpy as np
import matplotlib.pyplot as plt
import mpl_toolkits.mplot3d.axes3d as p3
from sklearn.cluster import AgglomerativeClustering
from sklearn.datasets.samples_generator import make_swiss_roll
# #############################################################################
# Generate data (swiss roll dataset)
n_samples = 1500
noise = 0.05
X, _ = make_swiss_roll(n_samples, noise)
# Make it thinner
X[:, 1] *= .5
# #############################################################################
# Compute clustering
print("Compute unstructured hierarchical clustering...")
st = time.time()
ward = AgglomerativeClustering(n_clusters=6, linkage='ward').fit(X)
elapsed_time = time.time() - st
label = ward.labels_
print("Elapsed time: %.2fs" % elapsed_time)
print("Number of points: %i" % label.size)
# #############################################################################
# Plot result
fig = plt.figure()
ax = p3.Axes3D(fig)
ax.view_init(7, -80)
for l in np.unique(label):
ax.plot3D(X[label == l, 0], X[label == l, 1], X[label == l, 2],
              'o', color=plt.cm.jet(float(l) / np.max(label + 1)))
plt.title('Without connectivity constraints (time %.2fs)' % elapsed_time)
# #############################################################################
# Define the structure A of the data. Here a 10 nearest neighbors
from sklearn.neighbors import kneighbors_graph
connectivity = kneighbors_graph(X, n_neighbors=10, include_self=False)
# #############################################################################
# Compute clustering
print("Compute structured hierarchical clustering...")
st = time.time()
ward = AgglomerativeClustering(n_clusters=6, connectivity=connectivity,
linkage='ward').fit(X)
elapsed_time = time.time() - st
label = ward.labels_
print("Elapsed time: %.2fs" % elapsed_time)
print("Number of points: %i" % label.size)
# #############################################################################
# Plot result
fig = plt.figure()
ax = p3.Axes3D(fig)
ax.view_init(7, -80)
for l in np.unique(label):
ax.plot3D(X[label == l, 0], X[label == l, 1], X[label == l, 2],
'o', color=plt.cm.jet(float(l) / np.max(label + 1)))
plt.title('With connectivity constraints (time %.2fs)' % elapsed_time)
plt.show()
| bsd-3-clause | 3,837,792,158,449,737,700 | 36.021978 | 79 | 0.591273 | false |
seppi91/CouchPotatoServer | couchpotato/core/media/movie/providers/info/themoviedb.py | 1 | 11383 | import random
import traceback
import itertools
from base64 import b64decode as bd
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import toUnicode, ss, tryUrlencode
from couchpotato.core.helpers.variable import tryInt, splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie.providers.base import MovieProvider
from couchpotato.environment import Env
log = CPLog(__name__)
autoload = 'TheMovieDb'
class TheMovieDb(MovieProvider):
http_time_between_calls = .35
configuration = {
'images': {
'secure_base_url': 'https://image.tmdb.org/t/p/',
},
}
ak = ['ZjdmNTE3NzU4NzdlMGJiNjcwMzUyMDk1MmIzYzc4NDA=', 'ZTIyNGZlNGYzZmVjNWY3YjU1NzA2NDFmN2NkM2RmM2E=',
'YTNkYzExMWU2NjEwNWY2Mzg3ZTk5MzkzODEzYWU0ZDU=', 'ZjZiZDY4N2ZmYTYzY2QyODJiNmZmMmM2ODc3ZjI2Njk=']
languages = [ 'en' ]
default_language = 'en'
def __init__(self):
addEvent('info.search', self.search, priority = 1)
addEvent('movie.search', self.search, priority = 1)
addEvent('movie.info', self.getInfo, priority = 1)
addEvent('movie.info_by_tmdb', self.getInfo)
addEvent('app.load', self.config)
def config(self):
# Reset invalid key
if self.conf('api_key') == '9b939aee0aaafc12a65bf448e4af9543':
self.conf('api_key', '')
languages = self.getLanguages()
# languages should never be empty, the first language is the default language used for all the description details
self.default_language = languages[0]
# en is always downloaded and it is the fallback
if 'en' in languages:
languages.remove('en')
# default language has a special management
languages.remove(self.default_language)
self.languages = languages
configuration = self.request('configuration')
if configuration:
self.configuration = configuration
def search(self, q, limit = 3):
""" Find movie by name """
if self.isDisabled():
return False
log.debug('Searching for movie: %s', q)
raw = None
try:
name_year = fireEvent('scanner.name_year', q, single = True)
raw = self.request('search/movie', {
'query': name_year.get('name', q),
'year': name_year.get('year'),
'search_type': 'ngram' if limit > 1 else 'phrase'
}, return_key = 'results')
except:
log.error('Failed searching TMDB for "%s": %s', (q, traceback.format_exc()))
results = []
if raw:
try:
nr = 0
for movie in raw:
parsed_movie = self.parseMovie(movie, extended = False)
if parsed_movie:
results.append(parsed_movie)
nr += 1
if nr == limit:
break
log.info('Found: %s', [result['titles'][0] + ' (' + str(result.get('year', 0)) + ')' for result in results])
return results
except SyntaxError as e:
                log.error('Failed to parse JSON response: %s', e)
return False
return results
def getInfo(self, identifier = None, extended = True, **kwargs):
if not identifier:
return {}
result = self.parseMovie({
'id': identifier
}, extended = extended)
return result or {}
def parseMovie(self, movie, extended = True):
# Do request, append other items
movie = self.request('movie/%s' % movie.get('id'), {
'language': self.conf('preferred_language').upper(),
'append_to_response': 'alternative_titles' + (',images,casts' if extended else ''),
})
if not movie:
return
movie_default = movie if self.default_language == 'en' else self.request('movie/%s' % movie.get('id'), {
'append_to_response': 'alternative_titles' + (',images,casts' if extended else ''),
'language': self.default_language
})
movie_default = movie_default or movie
movie_others = [ self.request('movie/%s' % movie.get('id'), {
'append_to_response': 'alternative_titles' + (',images,casts' if extended else ''),
'language': language
}) for language in self.languages] if self.languages else []
# Images
poster = self.getImage(movie, type = 'poster', size = 'w154')
poster_original = self.getImage(movie, type = 'poster', size = 'original')
backdrop_original = self.getImage(movie, type = 'backdrop', size = 'original')
extra_thumbs = self.getMultImages(movie, type = 'backdrops', size = 'original') if extended else []
images = {
'poster': [poster] if poster else [],
#'backdrop': [backdrop] if backdrop else [],
'poster_original': [poster_original] if poster_original else [],
'backdrop_original': [backdrop_original] if backdrop_original else [],
'actors': {},
'extra_thumbs': extra_thumbs
}
# Genres
try:
genres = [genre.get('name') for genre in movie.get('genres', [])]
except:
genres = []
# 1900 is the same as None
year = str(movie.get('release_date') or '')[:4]
if not movie.get('release_date') or year == '1900' or year.lower() == 'none':
year = None
# Gather actors data
actors = {}
if extended:
# Full data
cast = movie.get('casts', {}).get('cast', [])
for cast_item in cast:
try:
actors[toUnicode(cast_item.get('name'))] = toUnicode(cast_item.get('character'))
images['actors'][toUnicode(cast_item.get('name'))] = self.getImage(cast_item, type = 'profile', size = 'original')
except:
log.debug('Error getting cast info for %s: %s', (cast_item, traceback.format_exc()))
movie_data = {
'type': 'movie',
'via_tmdb': True,
'tmdb_id': movie.get('id'),
'alternate_titles': [m['title'] for m in movie['alternative_titles']['titles']],
'titles': [toUnicode(movie_default.get('title') or movie.get('title'))],
'original_title': movie.get('original_title'),
'images': images,
'imdb': movie.get('imdb_id'),
'runtime': movie.get('runtime'),
'released': str(movie.get('release_date')),
'year': tryInt(year, None),
'plot': movie_default.get('overview') or movie.get('overview'),
'genres': genres,
'collection': getattr(movie.get('belongs_to_collection'), 'name', None),
'actor_roles': actors
}
movie_data = dict((k, v) for k, v in movie_data.items() if v)
# Add alternative names
movies = [ movie ] + movie_others if movie == movie_default else [ movie, movie_default ] + movie_others
movie_titles = [ self.getTitles(movie) for movie in movies ]
all_titles = sorted(list(itertools.chain.from_iterable(movie_titles)))
alternate_titles = movie_data['titles']
for title in all_titles:
if title and title not in alternate_titles and title.lower() != 'none' and title is not None:
alternate_titles.append(title)
movie_data['titles'] = alternate_titles
return movie_data
def getImage(self, movie, type = 'poster', size = 'poster'):
image_url = ''
try:
path = movie.get('%s_path' % type)
if path:
image_url = '%s%s%s' % (self.configuration['images']['secure_base_url'], size, path)
except:
log.debug('Failed getting %s.%s for "%s"', (type, size, ss(str(movie))))
return image_url
def getMultImages(self, movie, type = 'backdrops', size = 'original'):
image_urls = []
try:
for image in movie.get('images', {}).get(type, [])[1:5]:
image_urls.append(self.getImage(image, 'file', size))
except:
log.debug('Failed getting %s.%s for "%s"', (type, size, ss(str(movie))))
return image_urls
def request(self, call = '', params = {}, return_key = None):
params = dict((k, v) for k, v in params.items() if v)
params = tryUrlencode(params)
try:
url = 'https://api.themoviedb.org/3/%s?api_key=%s%s' % (call, self.getApiKey(), '&%s' % params if params else '')
data = self.getJsonData(url, show_error = False)
except:
log.debug('Movie not found: %s, %s', (call, params))
data = None
if data and return_key and return_key in data:
data = data.get(return_key)
return data
def isDisabled(self):
if self.getApiKey() == '':
log.error('No API key provided.')
return True
return False
def getApiKey(self):
key = self.conf('api_key')
return bd(random.choice(self.ak)) if key == '' else key
def getLanguages(self):
languages = splitString(Env.setting('languages', section = 'core'))
if len(languages):
return languages
return [ 'en' ]
def getTitles(self, movie):
# add the title to the list
title = toUnicode(movie.get('title'))
titles = [title] if title else []
# add the original_title to the list
alternate_title = toUnicode(movie.get('original_title'))
if alternate_title and alternate_title not in titles:
titles.append(alternate_title)
# Add alternative titles
alternate_titles = movie.get('alternative_titles', {}).get('titles', [])
for alt in alternate_titles:
alt_name = toUnicode(alt.get('title'))
if alt_name and alt_name not in titles and alt_name.lower() != 'none' and alt_name is not None:
titles.append(alt_name)
        return titles
config = [{
'name': 'themoviedb',
'groups': [
{
'tab': 'searcher',
'name': 'searcher',
'options': [
{
'name': 'preferred_language',
                    'label': 'Preferred language code',
                    'description': 'Please provide your language code. It will be used for providers supporting alternate title searching.',
'default': 'en',
'placeholder': 'en|de|fr...',
},
],
}, {
'tab': 'providers',
'name': 'tmdb',
'label': 'TheMovieDB',
'hidden': True,
'description': 'Used for all calls to TheMovieDB.',
'options': [
{
'name': 'api_key',
'default': '',
'label': 'Api Key',
},
],
},
],
}]
| gpl-3.0 | -8,751,652,352,963,587,000 | 33.917178 | 140 | 0.53615 | false |
chromium/chromium | chrome/installer/mac/signing/signing.py | 6 | 4706 | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
The signing module defines the various binary pieces of the Chrome application
bundle that need to be signed, as well as providing utilities to sign them.
"""
import os.path
import re
from . import commands
def _linker_signed_arm64_needs_force(path):
"""Detects linker-signed arm64 code that can only be signed with --force
on this system.
Args:
path: A path to a code object to test.
Returns:
True if --force must be used with codesign --sign to successfully sign
the code, False otherwise.
"""
# On macOS 11.0 and later, codesign handles linker-signed code properly
# without the --force hand-holding. Check OS >= 10.16 because that's what
# Python will think the OS is if it wasn't built with the 11.0 SDK or later.
if commands.macos_version() >= [10, 16]:
return False
# Look just for --arch=arm64 because that's the only architecture that has
# linker-signed code by default. If this were used with universal code (if
# there were any), --display without --arch would default to the native
# architecture, which almost certainly wouldn't be arm64 and therefore would
# be wrong.
(returncode, stdout, stderr) = commands.lenient_run_command_output(
['codesign', '--display', '--verbose', '--arch=arm64', '--', path])
if returncode != 0:
# Problem running codesign? Don't make the error about this confusing
# function. Just return False and let some less obscure codesign
# invocation be the error. Not signed at all? No problem. No arm64 code?
# No problem either. Not code at all? File not found? Well, those don't
# count as linker-signed either.
return False
# Yes, codesign --display puts all of this on stderr.
match = re.search(b'^CodeDirectory .* flags=(0x[0-9a-f]+)( |\().*$', stderr,
re.MULTILINE)
if not match:
return False
flags = int(match.group(1), 16)
# This constant is from MacOSX11.0.sdk <Security/CSCommon.h>
# SecCodeSignatureFlags kSecCodeSignatureLinkerSigned.
LINKER_SIGNED_FLAG = 0x20000
return (flags & LINKER_SIGNED_FLAG) != 0
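# Example of a codesign stderr line the regex above matches (illustrative):
#   CodeDirectory v=20400 size=250 flags=0x20002(adhoc,linker-signed) hashes=5+2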
def sign_part(paths, config, part):
"""Code signs a part.
Args:
paths: A |model.Paths| object.
        config: The |model.CodeSignConfig| object.
part: The |model.CodeSignedProduct| to sign. The product's |path| must
be in |paths.work|.
"""
command = ['codesign', '--sign', config.identity]
path = os.path.join(paths.work, part.path)
if _linker_signed_arm64_needs_force(path):
command.append('--force')
if config.notary_user:
# Assume if the config has notary authentication information that the
# products will be notarized, which requires a secure timestamp.
command.append('--timestamp')
if part.sign_with_identifier:
command.extend(['--identifier', part.identifier])
reqs = part.requirements_string(config)
if reqs:
command.extend(['--requirements', '=' + reqs])
if part.options:
command.extend(['--options', ','.join(part.options)])
if part.entitlements:
command.extend(
['--entitlements',
os.path.join(paths.work, part.entitlements)])
command.append(path)
commands.run_command(command)
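# Illustrative command produced by sign_part() for a hypothetical config;
# the identity, identifier, options, and paths below are assumptions:
#   codesign --sign 'Developer ID Application: Example' --timestamp \
#       --identifier com.example.app --options restrict,library \
#       --entitlements /work/app.entitlements /work/Example.app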
def verify_part(paths, part):
"""Displays and verifies the code signature of a part.
Args:
paths: A |model.Paths| object.
part: The |model.CodeSignedProduct| to verify. The product's |path|
must be in |paths.work|.
"""
verify_options = list(part.verify_options) if part.verify_options else []
part_path = os.path.join(paths.work, part.path)
commands.run_command([
'codesign', '--display', '--verbose=5', '--requirements', '-', part_path
])
commands.run_command(['codesign', '--verify', '--verbose=6'] +
verify_options + [part_path])
def validate_app(paths, config, part):
"""Displays and verifies the signature of a CodeSignedProduct.
Args:
paths: A |model.Paths| object.
        config: The |model.CodeSignConfig| object.
part: The |model.CodeSignedProduct| for the outer application bundle.
"""
app_path = os.path.join(paths.work, part.path)
commands.run_command([
'codesign', '--display', '--requirements', '-', '--verbose=5', app_path
])
if config.run_spctl_assess:
commands.run_command(['spctl', '--assess', '-vv', app_path])
| bsd-3-clause | -5,977,744,646,191,301,000 | 36.648 | 80 | 0.645134 | false |
sadjadasghari/deeplab4a2d | loss_from_log.py | 1 | 5089 | #!/usr/bin/env python
# Martin Kersner, 2016/03/11
from __future__ import print_function
import sys
import re
import numpy as np
import matplotlib.pyplot as plt
from utils import strstr
def main():
output_data, log_files = process_arguments(sys.argv)
train_iteration = []
train_loss = []
train_accuracy0 = []
train_accuracy1 = []
train_accuracy2 = []
train_accuracy3 = []
train_accuracy4 = []
train_accuracy5 = []
base_train_iter = 0
for log_file in log_files:
with open(log_file, 'rb') as f:
if len(train_iteration) != 0:
base_train_iter = train_iteration[-1]
for line in f:
if strstr(line, 'Iteration') and strstr(line, 'loss'):
matched = match_loss(line)
train_loss.append(float(matched.group(1)))
matched = match_iteration(line)
train_iteration.append(int(matched.group(1))+base_train_iter)
# strong labels
elif strstr(line, 'Train net output #0: accuracy '):
matched = match_net_accuracy(line)
train_accuracy0.append(float(matched.group(1)))
elif strstr(line, 'Train net output #1: accuracy '):
matched = match_net_accuracy(line)
train_accuracy1.append(float(matched.group(1)))
elif strstr(line, 'Train net output #2: accuracy '):
matched = match_net_accuracy(line)
train_accuracy2.append(float(matched.group(1)))
# weak labels
elif strstr(line, 'Train net output #0: accuracy_bbox'):
matched = match_net_accuracy_bbox(line)
train_accuracy0.append(float(matched.group(1)))
elif strstr(line, 'Train net output #1: accuracy_bbox'):
matched = match_net_accuracy_bbox(line)
train_accuracy1.append(float(matched.group(1)))
elif strstr(line, 'Train net output #2: accuracy_bbox'):
matched = match_net_accuracy_bbox(line)
train_accuracy2.append(float(matched.group(1)))
elif strstr(line, 'Train net output #3: accuracy_strong'):
matched = match_net_accuracy_strong(line)
train_accuracy3.append(float(matched.group(1)))
elif strstr(line, 'Train net output #4: accuracy_strong'):
matched = match_net_accuracy_strong(line)
train_accuracy4.append(float(matched.group(1)))
elif strstr(line, 'Train net output #5: accuracy_strong'):
matched = match_net_accuracy_strong(line)
train_accuracy5.append(float(matched.group(1)))
if output_data == 'loss':
for x in train_loss:
print(x)
if output_data == 'acc1':
for x,y,z in zip(train_accuracy0, train_accuracy1, train_accuracy2):
print(x, y, z)
if output_data == 'acc2':
for x,y,z in zip(train_accuracy3, train_accuracy4, train_accuracy5):
print(x, y, z)
## loss
plt.plot(train_iteration, train_loss, 'k', label='Train loss')
plt.legend()
plt.ylabel('Loss')
plt.xlabel('Number of iterations')
plt.savefig('loss.png')
## evaluation
plt.clf()
if len(train_accuracy3) != 0:
plt.plot(range(len(train_accuracy0)), train_accuracy0, 'k', label='accuracy bbox 0')
plt.plot(range(len(train_accuracy1)), train_accuracy1, 'r', label='accuracy bbox 1')
plt.plot(range(len(train_accuracy2)), train_accuracy2, 'g', label='accuracy bbox 2')
plt.plot(range(len(train_accuracy3)), train_accuracy3, 'b', label='accuracy strong 0')
plt.plot(range(len(train_accuracy4)), train_accuracy4, 'c', label='accuracy strong 1')
plt.plot(range(len(train_accuracy5)), train_accuracy5, 'm', label='accuracy strong 2')
else:
plt.plot(range(len(train_accuracy0)), train_accuracy0, 'k', label='train accuracy 0')
plt.plot(range(len(train_accuracy1)), train_accuracy1, 'r', label='train accuracy 1')
plt.plot(range(len(train_accuracy2)), train_accuracy2, 'g', label='train accuracy 2')
plt.legend(loc=0)
plt.savefig('evaluation.png')
def match_iteration(line):
return re.search(r'Iteration (.*),', line)
def match_loss(line):
return re.search(r'loss = (.*)', line)
def match_net_accuracy(line):
return re.search(r'accuracy = (.*)', line)
def match_net_accuracy_bbox(line):
return re.search(r'accuracy_bbox = (.*)', line)
def match_net_accuracy_strong(line):
return re.search(r'accuracy_strong = (.*)', line)
def process_arguments(argv):
if len(argv) < 2:
help()
output_data = None
log_files = argv[2:]
if argv[1].lower() == 'loss':
output_data = 'loss'
elif argv[1].lower() == 'acc1':
output_data = 'acc1'
elif argv[1].lower() == 'acc2':
output_data = 'acc2'
else:
log_files = argv[1:]
return output_data, log_files
def help():
print('Usage: python loss_from_log.py [OUTPUT_TYPE] [LOG_FILE]+\n'
'OUTPUT_TYPE can be either loss, acc1 or acc 2\n'
'LOG_FILE is text file containing log produced by caffe.\n'
'At least one LOG_FILE has to be specified.\n'
'Files has to be given in correct order (the oldest logs as the first ones).'
, file=sys.stderr)
exit()
if __name__ == '__main__':
main()
| gpl-3.0 | 5,591,932,198,186,486,000 | 31.208861 | 90 | 0.63706 | false |
Debian/britney2 | tests/mock_swift.py | 1 | 5898 | # Mock a Swift server with autopkgtest results
# Author: Martin Pitt <martin.pitt@ubuntu.com>
import os
import tarfile
import io
import sys
import socket
import time
import tempfile
import json
try:
from http.server import HTTPServer, BaseHTTPRequestHandler
from urllib.parse import urlparse, parse_qs
except ImportError:
# Python 2
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from urlparse import urlparse, parse_qs
class SwiftHTTPRequestHandler(BaseHTTPRequestHandler):
'''Mock swift container with autopkgtest results
This accepts retrieving a particular result.tar (e. g.
/container/path/result.tar) or listing the container contents
(/container/?prefix=foo&delimiter=@&marker=foo/bar).
'''
# map container -> result.tar path -> (exitcode, testpkg-version[, testinfo])
results = {}
def do_GET(self):
p = urlparse(self.path)
path_comp = p.path.split('/')
container = path_comp[1]
path = '/'.join(path_comp[2:])
if path:
self.serve_file(container, path)
else:
self.list_container(container, parse_qs(p.query))
def serve_file(self, container, path):
if os.path.basename(path) != 'result.tar':
self.send_error(404, 'File not found (only result.tar supported)')
return
try:
fields = self.results[container][os.path.dirname(path)]
try:
(exitcode, pkgver, testinfo) = fields
except ValueError:
(exitcode, pkgver) = fields
testinfo = None
except KeyError:
self.send_error(404, 'File not found')
return
self.send_response(200)
self.send_header('Content-type', 'application/octet-stream')
self.end_headers()
tar = io.BytesIO()
with tarfile.open('result.tar', 'w', tar) as results:
# add exitcode
contents = ('%i' % exitcode).encode()
ti = tarfile.TarInfo('exitcode')
ti.size = len(contents)
results.addfile(ti, io.BytesIO(contents))
# add testpkg-version
if pkgver is not None:
contents = pkgver.encode()
ti = tarfile.TarInfo('testpkg-version')
ti.size = len(contents)
results.addfile(ti, io.BytesIO(contents))
# add testinfo.json
if testinfo:
contents = json.dumps(testinfo).encode()
ti = tarfile.TarInfo('testinfo.json')
ti.size = len(contents)
results.addfile(ti, io.BytesIO(contents))
self.wfile.write(tar.getvalue())
def list_container(self, container, query):
try:
objs = set(['%s/result.tar' % r for r in self.results[container]])
except KeyError:
self.send_error(401, 'Container does not exist')
return
if 'prefix' in query:
p = query['prefix'][-1]
objs = set([o for o in objs if o.startswith(p)])
if 'delimiter' in query:
d = query['delimiter'][-1]
# if find() returns a value, we want to include the delimiter, thus
# bump its result; for "not found" return None
find_adapter = lambda i: (i >= 0) and (i + 1) or None
objs = set([o[:find_adapter(o.find(d))] for o in objs])
if 'marker' in query:
m = query['marker'][-1]
objs = set([o for o in objs if o > m])
self.send_response(objs and 200 or 204) # 204: "No Content"
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write(('\n'.join(sorted(objs)) + '\n').encode('UTF-8'))
class AutoPkgTestSwiftServer:
def __init__(self, port=8080):
self.port = port
self.server_pid = None
self.log = None
def __del__(self):
if self.server_pid:
self.stop()
@classmethod
def set_results(klass, results):
'''Set served results.
results is a map: container -> result.tar path ->
(exitcode, testpkg-version, testinfo)
'''
SwiftHTTPRequestHandler.results = results
def start(self):
assert self.server_pid is None, 'already started'
if self.log:
self.log.close()
self.log = tempfile.TemporaryFile()
p = os.fork()
if p:
# parent: wait until server starts
self.server_pid = p
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
while True:
if s.connect_ex(('127.0.0.1', self.port)) == 0:
break
time.sleep(0.1)
s.close()
return
# child; quiesce logging on stderr
os.dup2(self.log.fileno(), sys.stderr.fileno())
srv = HTTPServer(('', self.port), SwiftHTTPRequestHandler)
srv.serve_forever()
sys.exit(0)
def stop(self):
assert self.server_pid, 'not running'
os.kill(self.server_pid, 15)
os.waitpid(self.server_pid, 0)
self.server_pid = None
self.log.close()
if __name__ == '__main__':
srv = AutoPkgTestSwiftServer()
srv.set_results({'autopkgtest-testing': {
'testing/i386/d/darkgreen/20150101_100000@': (0, 'darkgreen 1'),
'testing/i386/g/green/20150101_100000@': (0, 'green 1', {'custom_environment': ['ADT_TEST_TRIGGERS=green']}),
'testing/i386/l/lightgreen/20150101_100000@': (0, 'lightgreen 1'),
'testing/i386/l/lightgreen/20150101_100101@': (4, 'lightgreen 2'),
'testing/i386/l/lightgreen/20150101_100102@': (0, 'lightgreen 3'),
}})
srv.start()
print('Running on http://localhost:8080/autopkgtest-testing')
print('Press Enter to quit.')
sys.stdin.readline()
srv.stop()
| gpl-2.0 | 737,054,188,517,968,600 | 33.694118 | 117 | 0.573923 | false |
haticeerturk/Turkce-Ingilizce-Sozluk | Turkce-Ingılızce-Sozluk.py | 1 | 3507 | def kelimeEkleme() :
dosya = open("sozluk.dat","a")
m = raw_input("Turkce kelimeyi giriniz: ")
h = raw_input("Ingilizce karsiligini girin: ")
satir = m + "\t"+ ":" + "\t" + h + "\n"
dosya.write(satir)
dosya.close()
def ingGuncelle() :
dosya = open("sozluk.dat","r")
istek = raw_input("Guncelleme yapacaginiz kelimeyi yaziniz: ")
satirlar = dosya.readlines()
dizi = []
for i in range(len(satirlar)) :
sonraki = satirlar[i]
guncelleme = sonraki.find(istek)
if guncelleme != -1 :
a = istek
b = raw_input("yeni ingilizce ceviriyi girin: ")
yeni = a +"\t" + ":" + "\t" + b + "\n"
dizi.append(yeni)
else :
dizi.append(sonraki)
dosya.close()
dosya = open("sozluk.dat","w")
dosya.writelines(dizi)
dosya.close()
def sorgulama() :
dosya = open("sozluk.dat","r")
aranan = raw_input("Ingilizce anlamini istediginiz kelimeyi giriniz: ")
ara = dosya.readlines()
for i in range(len(ara)) :
siradaki = ara[i]
bulunan = siradaki.find(aranan)
if bulunan != -1 :
print ara[i]
dosya.close()
def listeleme() :
dosya = open("sozluk.dat","r")
harf = raw_input("Hangi harf ile baslayanlari listelemek istiyorsunuz: ")
bulunacak = dosya.readlines()
for i in range(len(bulunacak)) :
gelecek = bulunacak[i]
liste = gelecek.find(harf)
if liste == 0 :
print bulunacak[i]
dosya.close
def kelimeList() :
dosya = open("sozluk.dat","r")
kelime = raw_input("Hangi kelimenin gececegini yaziniz: ")
arama = dosya.readlines()
for i in range(len(arama)) :
kelimeler = arama[i]
ayrilmis = kelimeler.split("\t")
ayrik = ayrilmis[0].find(kelime)
if ayrik != -1 :
print arama[i]
dosya.close()
def turkceAyir() :
dosya = open("sozluk.dat","r")
turkce = dosya.readlines()
dizi = []
for i in range(len(turkce)) :
ayir = turkce[i]
ayrilacak = ayir.split("\t")
atanacak = ayrilacak[0] +"\n"
dizi.append(atanacak)
dosya.close()
dosya = open("turkce_kelimeler.dat","w")
dosya.writelines(dizi)
dosya.close()
def tumListe() :
dosya = open("sozluk.dat","r")
tum = dosya.readlines()
for i in range(len(tum)) :
print i+1,"-",tum[i]
def turkGuncel() :
dosya = open("sozluk.dat","r")
ing = raw_input("Turkcesi degistirilcek kelimeyi girin: ")
liste = dosya.readlines()
dizi = []
for i in range(len(liste)) :
sonraki = liste[i]
ayir = sonraki.split("\t")
bulundu = ayir[0]
degistir = sonraki.find(ing)
if degistir != -1 :
a = ayir[2]
b = raw_input("yeni turkce kelimeyi girin: ")
yeni = b + "\t" + ":" + "\t" + a
dizi.append(yeni)
else :
dizi.append(sonraki)
dosya.close()
dosya = open("sozluk.dat","w")
dosya.writelines(dizi)
dosya.close()
print "\t","\t","MENU"
print "1.Sozluge Yeni Kelime Ekleme"
print "2.Sozlukteki Bir Kelimenin Ingilizce Anlamini Guncelleme"
print "3.Sozlukteki Bir Kelimenin Ingilizce Anlamini Sorgulama"
print "4.Girilen Bir Harf Ile Baslayan Kelimeler Ve Ingilizce Anlamlarinin Listelenmesi"
print "5.Girilen Bir Metin Parcasinin gectigi kelimeler ve Anlamlarinin Listesi"
print "6.Turkce Kelimeleri Baska Dosyaya At"
print "7.Tum Kelimeleri Listele"
print "8.Turkce Kelime Guncelleme"
secim = input("Gormek istediginiz secenegi giriniz: ")
if secim == 1 :
kelimeEkleme()
elif secim == 2 :
ingGuncelle()
elif secim == 3 :
sorgulama()
elif secim == 4 :
listeleme()
elif secim == 5 :
kelimeList()
elif secim == 6 :
turkceAyir()
elif secim == 7 :
tumListe()
elif secim == 8 :
turkGuncel()
| gpl-3.0 | 7,969,239,480,229,963,000 | 16.892857 | 88 | 0.64414 | false |
rdo-management/ironic-discoverd | ironic_discoverd_ramdisk/discover.py | 1 | 8663 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import json
import logging
import os
import subprocess
import tarfile
import tempfile
import netifaces
import requests
LOG = logging.getLogger('ironic-discoverd-ramdisk')
def try_call(*cmd, **kwargs):
strip = kwargs.pop('strip', True)
kwargs['stdout'] = subprocess.PIPE
kwargs['stderr'] = subprocess.PIPE
try:
p = subprocess.Popen(cmd, **kwargs)
out, err = p.communicate()
except EnvironmentError as exc:
LOG.warn('command %s failed: %s', cmd, exc)
return
if p.returncode:
LOG.warn('command %s returned failure status %d:\n%s', cmd,
p.returncode, err.strip())
else:
return out.strip() if strip else out
def try_shell(sh, **kwargs):
strip = kwargs.pop('strip', True)
kwargs['stdout'] = subprocess.PIPE
kwargs['stderr'] = subprocess.PIPE
kwargs['shell'] = True
p = subprocess.Popen([sh], **kwargs)
out, err = p.communicate()
if p.returncode:
LOG.warn('shell script "%s" failed with code %d:\n%s', sh,
p.returncode, err.strip())
else:
return out.strip() if strip else out
class AccumulatedFailure(object):
"""Object accumulated failures without raising exception."""
def __init__(self):
self._failures = []
def add(self, fail, *fmt):
"""Add failure with optional formatting."""
if fmt:
fail = fail % fmt
LOG.error('%s', fail)
self._failures.append(fail)
def get_error(self):
"""Get error string or None."""
if not self._failures:
return
msg = ('The following errors were encountered during '
'hardware discovery:\n%s'
% '\n'.join('* %s' % item for item in self._failures))
return msg
def __nonzero__(self):
return bool(self._failures)
__bool__ = __nonzero__
def __repr__(self): # pragma: no cover
# This is for tests
if self:
return '<%s: %s>' % (self.__class__.__name__,
', '.join(self._failures))
else:
return '<%s: success>' % self.__class__.__name__
def discover_basic_properties(data, args):
# These properties might not be present, we don't count it as failure
data['boot_interface'] = args.bootif
data['ipmi_address'] = try_shell(
"ipmitool lan print | grep -e 'IP Address [^S]' | awk '{ print $4 }'")
LOG.info('BMC IP address: %s', data['ipmi_address'])
def discover_network_interfaces(data, failures):
data.setdefault('interfaces', {})
for iface in netifaces.interfaces():
if iface.startswith('lo'):
LOG.info('ignoring local network interface %s', iface)
continue
LOG.debug('found network interface %s', iface)
addrs = netifaces.ifaddresses(iface)
try:
mac = addrs[netifaces.AF_LINK][0]['addr']
except (KeyError, IndexError):
LOG.info('no link information for interface %s in %s',
iface, addrs)
continue
try:
ip = addrs[netifaces.AF_INET][0]['addr']
except (KeyError, IndexError):
LOG.info('no IP address for interface %s', iface)
ip = None
data['interfaces'][iface] = {'mac': mac, 'ip': ip}
if data['interfaces']:
LOG.info('network interfaces: %s', data['interfaces'])
else:
failures.add('no network interfaces found')
def discover_scheduling_properties(data, failures):
scripts = [
('cpus', "grep processor /proc/cpuinfo | wc -l"),
('cpu_arch', "lscpu | grep Architecture | awk '{ print $2 }'"),
('local_gb', "fdisk -l | grep Disk | awk '{print $5}' | head -n 1"),
]
for key, script in scripts:
data[key] = try_shell(script)
LOG.info('value for "%s" field is %s', key, data[key])
ram_info = try_shell(
"dmidecode --type memory | grep Size | awk '{ print $2; }'")
if ram_info:
total_ram = 0
for ram_record in ram_info.split('\n'):
try:
total_ram += int(ram_record)
except ValueError:
pass
data['memory_mb'] = total_ram
LOG.info('total RAM: %s MiB', total_ram)
else:
failures.add('failed to get RAM information')
for key in ('cpus', 'local_gb', 'memory_mb'):
try:
data[key] = int(data[key])
except (KeyError, ValueError, TypeError):
LOG.warn('value for %s is missing or malformed: %s',
key, data.get(key))
data[key] = None
# FIXME(dtantsur): -1 is required to give Ironic some spacing for
# partitioning and may be removed later
if data['local_gb']:
data['local_gb'] = data['local_gb'] / 1024 / 1024 / 1024 - 1
if data['local_gb'] < 1:
LOG.warn('local_gb is less than 1 GiB')
data['local_gb'] = None
def discover_additional_properties(args, data, failures):
hw_args = ('--benchmark', 'cpu', 'disk', 'mem') if args.benchmark else ()
hw_json = try_call('hardware-detect', *hw_args)
if hw_json:
try:
data['data'] = json.loads(hw_json)
except ValueError:
LOG.error('JSON value returned from hardware-detect cannot be '
'decoded:\n%s', hw_json)
failures.add('unable to get extended hardware properties')
else:
failures.add('unable to get extended hardware properties')
def discover_block_devices(data):
block_devices = try_shell(
"lsblk -no TYPE,SERIAL | grep disk | awk '{print $2}'")
if not block_devices:
LOG.warn('unable to get block devices')
return
serials = [item for item in block_devices.split('\n') if item.strip()]
data['block_devices'] = {'serials': serials}
def discover_hardware(args, data, failures):
try_call('modprobe', 'ipmi_msghandler')
try_call('modprobe', 'ipmi_devintf')
try_call('modprobe', 'ipmi_si')
discover_basic_properties(data, args)
discover_network_interfaces(data, failures)
discover_scheduling_properties(data, failures)
if args.use_hardware_detect:
discover_additional_properties(args, data, failures)
discover_block_devices(data)
def call_discoverd(args, data, failures):
data['error'] = failures.get_error()
LOG.info('posting collected data to %s', args.callback_url)
resp = requests.post(args.callback_url, data=json.dumps(data))
if resp.status_code >= 400:
LOG.error('discoverd error %d: %s',
resp.status_code,
resp.content.decode('utf-8'))
resp.raise_for_status()
return resp.json()
def collect_logs(args):
files = {args.log_file} | set(args.system_log_file or ())
with tempfile.TemporaryFile() as fp:
with tarfile.open(fileobj=fp, mode='w:gz') as tar:
with tempfile.NamedTemporaryFile() as jrnl_fp:
if try_shell("journalctl > '%s'" % jrnl_fp.name) is not None:
tar.add(jrnl_fp.name, arcname='journal')
else:
LOG.warn('failed to get system journal')
for fname in files:
if os.path.exists(fname):
tar.add(fname)
else:
LOG.warn('log file %s does not exist', fname)
fp.seek(0)
return base64.b64encode(fp.read())
def setup_ipmi_credentials(resp):
user, password = resp['ipmi_username'], resp['ipmi_password']
if try_call('ipmitool', 'user', 'set', 'name', '2', user) is None:
raise RuntimeError('failed to set IPMI user name to %s', user)
if try_call('ipmitool', 'user', 'set', 'password', '2', password) is None:
raise RuntimeError('failed to set IPMI password')
try_call('ipmitool', 'user', 'enable', '2')
try_call('ipmitool', 'channel', 'setaccess', '1', '2',
'link=on', 'ipmi=on', 'callin=on', 'privilege=4')
def fork_and_serve_logs(args):
pass # TODO(dtantsur): implement
| apache-2.0 | -1,589,015,766,572,319,500 | 32.191571 | 78 | 0.587441 | false |
eblade/telegram | telegram/auth/sign.py | 1 | 2168 | from Crypto.Signature import PKCS1_v1_5
from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
def generate_key_pair(username):
private_key = RSA.generate(2048)
public_key = private_key.publickey()
return private_key.exportKey(), public_key.exportKey()
class RSAVerifier(object):
def __init__(self, public_key_getter):
"""
Contructor for RSA Veriefier.
:param function public_key_getter: A function that take the username and returns it's
public key
"""
self.public_key_getter = public_key_getter
def verify(self, sender, signature, text):
"""
Verify a signed message.
:param unicode sender: The sender of the message "username[@domain]"
:param unicode signature: The message's signature
:param unicode text: The signed content
:rtype: bool True if authentic, False otherwise
"""
public_key = self.public_key_getter(sender)
if public_key is None:
print("Unable to find the public key for %s!" % sender)
return False
key = RSA.importKey(public_key)
h = SHA.new(text)
verifier = PKCS1_v1_5.new(key)
return verifier.verify(h, signature)
class RSASigner(object):
def __init__(self, private_key_getter):
"""
Contructor for RSA Veriefier.
:param function private_key_getter: A function that take the username and returns
it's private key
"""
self.private_key_getter = private_key_getter
def sign(self, sender, text):
"""
Let a user sign a message.
:param unicode sender: The sender of the message "username[@domain]"
:param unicode text: The content tot sign
:rtype: unicode The signature
"""
private_key = self.private_key_getter
if private_key is None:
print("Unable to find the private key for %s!" % sender)
return None
key = RSA.importKey(private_key)
h = SHA.new(text)
signer = PKCS1_v1_5.new(key)
return signer.sign(h)
| mit | 6,223,994,119,686,426,000 | 31.358209 | 93 | 0.601937 | false |
techbliss/Python_editor | 7.0/plugins/Code editor/pyeditor.py | 1 | 41135 | # Created by Storm Shadow www.techbliss.org
# Created by Storm Shadow www.techbliss.org
print "\n" #getting the box fit
print " ###################################################\n" \
" # Author Storm Shadow # \n" \
" # Hotkeys # \n" \
" # NewFile: Ctrl+N #\n" \
" # OpenFile: Ctrl+O #\n" \
" # SaveFile: Ctrl+S #\n" \
" # RunScript: Ctrl+E #\n" \
" # Undo: Ctrl+Z #\n" \
" # Redo: Ctrl+Y #\n" \
" # SelectALL: Ctrl+A #\n" \
" # Paste: Ctrl+V #\n" \
" # Font: Ctrl+F #\n" \
" # ResetFolding: Ctrl+R #\n" \
" # CircleFolding: Ctrl+C #\n" \
" # PlainFolding: Ctrl+P #\n" \
" # HEX-ray Home: Ctrl+W #\n" \
" # Ida Pro Python SDK Ctrl+I #\n" \
" # IDAPROPythonGit: Ctrl+G #\n" \
" # Author: Ctrl+B #\n" \
" # Enable Reg: Alt+E #\n" \
" # Disable Reg: Alt+D #\n" \
" # Zoom in Ctrl+Shift+ + #\n" \
" # Zoom Out Ctrl+Shift+ - #\n" \
" # Profile Code Ctrl+Shift+ E #\n" \
" ###################################################\n" \
" # IDA PRO python Editor #\n" \
" ###################################################\n"
import os
import sys
try:
dn = idaapi.idadir("plugins\\Code editor")
except NameError:
dn = os.getcwd()
try:
TemplateFile = idaapi.idadir("plugins\\Code editor\\template\\Plugin_temp")
except NameError:
TemplateFile = os.getcwd()+r'\\template\\Plugin_temp'
sys.path.insert(0, dn)
sys.path.insert(0, os.getcwd()+r'\\icons')
sys.path.insert(0, os.getcwd()+r'\\template')
import PyQt5
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.Qsci import QsciScintilla, QsciLexerPython
from PyQt5.QtGui import QFont, QFontMetrics, QColor
from PyQt5.QtWidgets import QDialog, QMessageBox, QWizard, QWizardPage
from PyQt5.QtCore import QCoreApplication
plugin_path = ""
if sys.platform == "win32":
if hasattr(sys, "frozen"):
plugin_path = os.path.join(os.path.dirname(os.path.abspath(sys.executable)), "PyQt5", "plugins")
QCoreApplication.addLibraryPath(plugin_path)
else:
import site
for dir in site.getsitepackages():
QCoreApplication.addLibraryPath(os.path.join(dir, "PyQt5", "plugins"))
elif sys.platform == "darwin":
plugin_path = os.path.join(QCoreApplication.getInstallPrefix(), "Resources", "plugins")
if plugin_path:
QCoreApplication.addLibraryPath(plugin_path)
if hasattr(QtCore.Qt, 'AA_EnableHighDpiScaling'):
PyQt5.QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling, True)
if hasattr(QtCore.Qt, 'AA_UseHighDpiPixmaps'):
PyQt5.QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_UseHighDpiPixmaps, True)
try:
import ico
except ImportError:
import icons.ico
try:
import iconsmore
except ImportError:
import icons.iconsmore
try:
import icons3
except ImportError:
import icons.icons3
try:
import iconf
except ImportError:
import icons.iconf
try:
import icon4
except ImportError:
pass
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtWidgets.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtWidgets.QApplication.translate(context, text,
disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtWidgets.QApplication.translate(context, text, disambig)
class Ui_messageformForm(QtWidgets.QWidget):
def setupUi1(self, messageformForm):
messageformForm.setObjectName("messageformForm")
messageformForm.resize(404, 169)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Ignored, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(messageformForm.sizePolicy().hasHeightForWidth())
messageformForm.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setFamily("Consolas")
messageformForm.setFont(font)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/icons/twa.gif"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
messageformForm.setWindowIcon(icon2)
self.label = QtWidgets.QLabel(messageformForm)
self.label.setGeometry(QtCore.QRect(40, 20, 341, 111))
font = QtGui.QFont()
font.setPointSize(19)
self.label.setFont(font)
self.label.setObjectName("label")
self.retranslateUi(messageformForm)
QtCore.QMetaObject.connectSlotsByName(messageformForm)
def retranslateUi(self, messageformForm):
_translate = QtCore.QCoreApplication.translate
messageformForm.setWindowTitle(_translate("messageformForm", "Soon to be fixed"))
self.label.setText(_translate("messageformForm", "Soon to be fixed"
))
class Ui_Wizard(QtWidgets.QWizard):
def __init__(self, parent=None):
super(Ui_Wizard, self).__init__(parent=None)
Wizard.setObjectName("Wizard")
Wizard.resize(762, 500)
font = QtGui.QFont()
font.setFamily("Calibri Light")
Wizard.setFont(font)
Wizard.setOptions(QtWidgets.QWizard.HelpButtonOnRight)
self.wizardPage1 = QtWidgets.QWizardPage()
font = QtGui.QFont()
font.setFamily("Calibri Light")
font.setPointSize(20)
self.wizardPage1.setFont(font)
self.wizardPage1.setObjectName("wizardPage1")
self.textBrowser_2 = QtWidgets.QTextBrowser(self.wizardPage1)
self.textBrowser_2.setGeometry(QtCore.QRect(130, 140, 421, 131))
self.textBrowser_2.setFrameShape(QtWidgets.QFrame.NoFrame)
self.textBrowser_2.setObjectName("textBrowser_2")
Wizard.addPage(self.wizardPage1)
self.wizardPage = QtWidgets.QWizardPage()
self.wizardPage.setTitle("")
self.wizardPage.setSubTitle("")
self.wizardPage.setObjectName("wizardPage")
self.textBrowser_4 = QtWidgets.QTextBrowser(self.wizardPage)
self.textBrowser_4.setGeometry(QtCore.QRect(130, 140, 499, 239))
self.textBrowser_4.setFrameShape(QtWidgets.QFrame.NoFrame)
self.textBrowser_4.setObjectName("textBrowser_4")
Wizard.addPage(self.wizardPage)
self.tempwizardPage = QtWidgets.QWizardPage()
self.tempwizardPage.setObjectName("tempwizardPage")
self.verticalLayout = QtWidgets.QVBoxLayout(self.tempwizardPage)
self.verticalLayout.setObjectName("verticalLayout")
self.TemptextEdit = Qsci.QsciScintilla(self.tempwizardPage)
self.TemptextEdit.setToolTip("")
self.TemptextEdit.setWhatsThis("")
self.TemptextEdit.setObjectName("TemptextEdit")
self.verticalLayout.addWidget(self.TemptextEdit)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.temppushButtonopen = QtWidgets.QPushButton(self.tempwizardPage)
self.temppushButtonopen.setObjectName("temppushButtonopen")
self.horizontalLayout.addWidget(self.temppushButtonopen)
self.temppushButtonsave = QtWidgets.QPushButton(self.tempwizardPage)
self.temppushButtonsave.setObjectName("temppushButtonsave")
self.horizontalLayout.addWidget(self.temppushButtonsave)
self.verticalLayout.addLayout(self.horizontalLayout)
Wizard.addPage(self.tempwizardPage)
self.scriptwizardPage = QtWidgets.QWizardPage()
self.scriptwizardPage.setObjectName("scriptwizardPage")
self.textBrowser_5 = QtWidgets.QTextBrowser(self.scriptwizardPage)
self.textBrowser_5.setGeometry(QtCore.QRect(120, 130, 499, 239))
self.textBrowser_5.setFrameShape(QtWidgets.QFrame.NoFrame)
self.textBrowser_5.setObjectName("textBrowser_5")
Wizard.addPage(self.scriptwizardPage)
self.wizardPage_3 = QtWidgets.QWizardPage()
self.wizardPage_3.setObjectName("wizardPage_3")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.wizardPage_3)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.script_textEdit = Qsci.QsciScintilla(self.wizardPage_3)
self.script_textEdit.setToolTip("")
self.script_textEdit.setWhatsThis("")
self.script_textEdit.setObjectName("script_textEdit")
self.verticalLayout_2.addWidget(self.script_textEdit)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem1)
self.scriptGrabpushButton = QtWidgets.QPushButton(self.wizardPage_3)
self.scriptGrabpushButton.setObjectName("scriptGrabpushButton")
self.horizontalLayout_2.addWidget(self.scriptGrabpushButton)
self.scriptpushButtonopen = QtWidgets.QPushButton(self.wizardPage_3)
self.scriptpushButtonopen.setObjectName("scriptpushButtonopen")
self.horizontalLayout_2.addWidget(self.scriptpushButtonopen)
self.scriptpushButtonsave = QtWidgets.QPushButton(self.wizardPage_3)
self.scriptpushButtonsave.setObjectName("scriptpushButtonsave")
self.horizontalLayout_2.addWidget(self.scriptpushButtonsave)
self.verticalLayout_2.addLayout(self.horizontalLayout_2)
Wizard.addPage(self.wizardPage_3)
self.wizardPage_2 = QtWidgets.QWizardPage()
font = QtGui.QFont()
font.setPointSize(20)
self.wizardPage_2.setFont(font)
self.wizardPage_2.setObjectName("wizardPage_2")
self.textBrowser_6 = QtWidgets.QTextBrowser(self.wizardPage_2)
self.textBrowser_6.setGeometry(QtCore.QRect(170, 140, 411, 191))
self.textBrowser_6.setFrameShape(QtWidgets.QFrame.NoFrame)
self.textBrowser_6.setObjectName("textBrowser_6")
Wizard.addPage(self.wizardPage_2)
#font textedit
self.skrift = QFont()
self.skrift.setFamily('Consolas')
self.skrift.setFixedPitch(True)
self.skrift.setPointSize(11)
self.TemptextEdit.setFont(self.skrift)
self.script_textEdit.setFont(self.skrift)
#python style temp
self.lexer = QsciLexerPython(self.TemptextEdit)
self.lexer.setFont(self.skrift)
self.lexer.setEolFill(True)
#Python style scritps
self.lexer = QsciLexerPython(self.script_textEdit)
self.lexer.setFont(self.skrift)
self.lexer.setEolFill(True)
self.filename = ""
#python style temp
self.TemptextEdit.setAutoCompletionThreshold(0)
self.TemptextEdit.setAutoCompletionThreshold(6)
self.TemptextEdit.setAutoCompletionThreshold(8)
self.TemptextEdit.setAutoCompletionSource(Qsci.QsciScintilla.AcsAPIs)
# self.TemptextEdit.setDefaultFont(self.skrift)
self.TemptextEdit.setLexer(self.lexer)
self.TemptextEdit.SendScintilla(QsciScintilla.SCI_STYLESETFONT, 1, 'Consolas')
#python style script
self.script_textEdit.setAutoCompletionThreshold(0)
self.script_textEdit.setAutoCompletionThreshold(6)
self.script_textEdit.setAutoCompletionThreshold(8)
self.script_textEdit.setAutoCompletionSource(Qsci.QsciScintilla.AcsAPIs)
# self.script_textEdit.setDefaultFont(self.skrift)
self.script_textEdit.setLexer(self.lexer)
self.script_textEdit.SendScintilla(QsciScintilla.SCI_STYLESETFONT, 1, 'Consolas')
#line numbers temp
fontmetrics = QFontMetrics(self.skrift)
self.TemptextEdit.setMarginsFont(self.skrift)
self.TemptextEdit.setMarginWidth(0, fontmetrics.width("00000") + 6)
self.TemptextEdit.setTabWidth(4)
#line numbers script
fontmetrics = QFontMetrics(self.skrift)
self.script_textEdit.setMarginsFont(self.skrift)
self.script_textEdit.setMarginWidth(0, fontmetrics.width("00000") + 6)
self.script_textEdit.setTabWidth(4)
#brace temp
self.TemptextEdit.setBraceMatching(QsciScintilla.SloppyBraceMatch)
#brace script
self.script_textEdit.setBraceMatching(QsciScintilla.SloppyBraceMatch)
#auto line tab =4 temp
self.TemptextEdit.setAutoIndent(True)
#auto line tab =4 script
self.TemptextEdit.setAutoIndent(True)
#scroolbar
self.script_textEdit.SendScintilla(QsciScintilla.SCI_SETHSCROLLBAR, 1)
try:
bs = open(TemplateFile).read()
bba = QtCore.QByteArray(bs)
self.bts = QtCore.QTextStream(bba)
self.bheysa = self.bts.readAll()
self.TemptextEdit.setText(self.bheysa)
self.TemptextEdit.setMarkerBackgroundColor((QColor(66, 66, 255)))
marker = self.TemptextEdit.markerDefine(PyQt5.Qsci.QsciScintilla.Rectangle, 2)
self.TemptextEdit.markerAdd(7, 2)
self.TemptextEdit.markerAdd(11, 2)
self.TemptextEdit.markerAdd(12, 2)
self.TemptextEdit.markerAdd(13, 2)
self.TemptextEdit.markerAdd(14, 2)
self.TemptextEdit.markerAdd(15, 2)
self.TemptextEdit.markerAdd(19, 2)
self.TemptextEdit.markerAdd(27, 2)
self.TemptextEdit.markerAdd(34, 2)
self.TemptextEdit.markerAdd(35, 2)
self.TemptextEdit.markerAdd(40, 2)
self.TemptextEdit.markerAdd(41, 2)
self.TemptextEdit.markerAdd(42, 2)
self.TemptextEdit.markerAdd(43, 2)
self.TemptextEdit.markerAdd(44, 2)
self.TemptextEdit.markerAdd(45, 2)
self.TemptextEdit.markerAdd(48, 2)
self.TemptextEdit.markerAdd(50, 2)
self.TemptextEdit.markerAdd(51, 2)
self.TemptextEdit.markerAdd(52, 2)
self.TemptextEdit.markerAdd(53, 2)
self.TemptextEdit.markerAdd(54, 2)
self.TemptextEdit.markerAdd(55, 2)
self.TemptextEdit.markerAdd(62, 2)
self.TemptextEdit.markerAdd(63, 2)
self.TemptextEdit.markerAdd(64, 2)
self.TemptextEdit.markerAdd(67, 2)
self.TemptextEdit.markerAdd(89, 2)
self.TemptextEdit.markerAdd(97, 2)
self.TemptextEdit.markerAdd(98, 2)
self.TemptextEdit.markerAdd(99, 2)
self.TemptextEdit.markerAdd(102, 2)
except:
self.TemptextEdit.setText('Plugin_temp file not found')
pass
self.retranslateUi2(Wizard)
QtCore.QMetaObject.connectSlotsByName(Wizard)
def retranslateUi2(self, Wizard):
_translate = QtCore.QCoreApplication.translate
Wizard.setWindowTitle(_translate("Wizard", " Ida Pro Plugin Wizard"))
self.textBrowser_2.setHtml(_translate("Wizard", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Calibri Light\'; font-size:20pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Welcome to the plugin wizard.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Please follow the steps in the wizard, to tranform your code, to a full Ida Pro plugin.</p></body></html>"))
self.textBrowser_4.setHtml(_translate("Wizard", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Calibri Light\'; font-size:8.14286pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:20pt;\">First we create the plugin loader</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:20pt;\">Then we change the higlightet text in the template, and then save the plugin loader in Ida Pro Plugins folder.</span></p></body></html>"))
self.temppushButtonopen.setText(_translate("Wizard", "Open"))
self.temppushButtonsave.setText(_translate("Wizard", "Save"))
self.textBrowser_5.setHtml(_translate("Wizard", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Calibri Light\'; font-size:8.14286pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:20pt;\">Now we grab the editors current script, or open a new script.<br />Remember to save this in the right folder.<br />Plugins\\My_plugin_folder as declared in the template.</span></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:20pt;\"><br /></p></body></html>"))
self.scriptGrabpushButton.setText(_translate("Wizard", "Grab from Editor"))
self.scriptpushButtonopen.setText(_translate("Wizard", "Open"))
self.scriptpushButtonsave.setText(_translate("Wizard", "Save"))
self.textBrowser_6.setHtml(_translate("Wizard", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Calibri Light\'; font-size:20pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Loader Template should now be in <br />ida pro\\plugin<br />script should be in a subfolder<br />ida pro\\plugin\\Myplugin\\</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">If above are correct your good to go!</p></body></html>"))
self.temppushButtonopen.clicked.connect(self.opentemp)
self.temppushButtonsave.clicked.connect(self.savetemp)
self.scriptpushButtonopen.clicked.connect(self.openscript)
self.scriptpushButtonsave.clicked.connect(self.savescript)
self.scriptGrabpushButton.clicked.connect(self.grapper)
def grapper(self):
#hellotext = Ui_MainWindow
# hello2= hellotext.sendgrapped
# print str(hello2)
messageformForm.show()
def opentemp(self):
print "hello"
self.path = QtCore.QFileInfo(self.filename).path()
# Get filename and show only .writer files
(self.filename, _) = \
QtWidgets.QFileDialog.getOpenFileName(self.wizardPage_3,
'Open File', self.path,
'Python Files (*.py *.pyc *.pyw)', '')
if self.filename:
with open(self.filename, 'r') as self.file:
self.TemptextEdit.setText(self.file.read())
os.chdir(str(self.path))
def savetemp(self):
self.path = QtCore.QFileInfo(self.filename).path()
(self.filename, _) = \
QtWidgets.QFileDialog.getSaveFileName(self, 'Save as'
, self.path, 'Python Files (*.py *.pyc *.pyw)')
if self.filename:
self.savetexttemp(self.filename)
os.chdir(str(self.path))
def savetexttemp(self, fileName):
textout = self.TemptextEdit.text()
file = QtCore.QFile(fileName)
if file.open(QtCore.QIODevice.WriteOnly):
QtCore.QTextStream(file) << textout
else:
QtWidgets.QMessageBox.information(self.tempwizardPage,
'Unable to open file', file.errorString())
os.chdir(str(self.path))
def openscript(self):
print "hello"
self.path = QtCore.QFileInfo(self.filename).path()
# Get filename and show only .writer files
(self.filename, _) = \
QtWidgets.QFileDialog.getOpenFileName(self.wizardPage_3,
'Open File', self.path,
'Python Files (*.py *.pyc *.pyw)', '')
if self.filename:
with open(self.filename, 'r') as self.file:
self.script_textEdit.setText(self.file.read())
os.chdir(str(self.path))
def savescript(self):
self.path = QtCore.QFileInfo(self.filename).path()
(self.filename, _) = \
QtWidgets.QFileDialog.getSaveFileName(self.wizardPage_3, 'Save as'
, self.path, 'Python Files (*.py *.pyc *.pyw)')
if self.filename:
self.savetextscript(self.filename)
os.chdir(str(self.path))
def savetextscript(self, fileName):
textout = self.script_textEdit.text()
file = QtCore.QFile(fileName)
if file.open(QtCore.QIODevice.WriteOnly):
QtCore.QTextStream(file) << textout
else:
QtWidgets.QMessageBox.information(self.wizardPage_3,
'Unable to open file', file.errorString())
os.chdir(str(self.path))
from PyQt5 import Qsci
import sys
#app2 = QtWidgets.QApplication(sys.argv)
class Ui_MainWindow(QtWidgets.QMainWindow):
ARROW_MARKER_NUM = 8
def __init__(self, parent=None):
super(Ui_MainWindow, self).__init__(parent=None)
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(640, 480)
self.vindu = QtWidgets.QWidget(MainWindow)
self.vindu.setStyleSheet(_fromUtf8('notusedasyet'))
#MainWindow.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
self.vindu.setObjectName(_fromUtf8("vindu"))
self.verticalLayout = PyQt5.QtWidgets.QVBoxLayout(self.vindu)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/ico/python.png")), QtGui.QIcon.Normal, QtGui.QIcon.On)
MainWindow.setWindowIcon(icon)
self.verticalLayout.setContentsMargins(0,0,0,0)
self.verticalLayout.setSpacing(0)
self.verticalLayout.setObjectName(_fromUtf8('verticalLayout'))
self.codebox = Qsci.QsciScintilla(self.vindu)
self.codebox.setToolTip(_fromUtf8(""))
self.codebox.setWhatsThis(_fromUtf8(""))
self.codebox.setAutoFillBackground(False)
self.codebox.setFrameShape(QtWidgets.QFrame.NoFrame)
self.codebox.setObjectName(_fromUtf8("codebox"))
self.verticalLayout.addWidget(self.codebox)
MainWindow.setCentralWidget(self.vindu)
#toolbar
self.toolBar = QtWidgets.QToolBar(MainWindow)
self.toolBar.setAutoFillBackground(False)
self.toolBar.setIconSize(QtCore.QSize(32, 32))
self.toolBar.setToolButtonStyle(QtCore.Qt.ToolButtonIconOnly)
self.toolBar.setObjectName(_fromUtf8("toolBar2"))
MainWindow.addToolBar(QtCore.Qt.LeftToolBarArea, self.toolBar)
self.toolBar.addSeparator()
#toolbar2 debugger
#self.toolBar2 = QtGui.QToolBar(MainWindow)
#self.toolBar2.setAutoFillBackground(False)
#self.toolBar2.setIconSize(QtCore.QSize(32, 32))
#self.toolBar2.setToolButtonStyle(QtCore.Qt.ToolButtonIconOnly)
#self.toolBar2.setObjectName(_fromUtf8("toolBar"))
# MainWindow.addToolBar(QtCore.Qt.RightToolBarArea, self.toolBar2)
# self.toolBar2.addSeparator()
#getting ready for debugger
self.codebox.setMarginSensitivity(1, True)
self.codebox.marginClicked.connect(self.on_margin_clicked)
self.codebox.markerDefine(QsciScintilla.FullRectangle, self.ARROW_MARKER_NUM)
self.codebox.setMarkerBackgroundColor(QColor("#ee1111"), self.ARROW_MARKER_NUM)
#first action Newfile
self.toolBar.newAction = QtWidgets.QAction(QtGui.QIcon(":/ico/new.png"),"New",self.toolBar)
self.toolBar.newAction.setStatusTip("Clear TextBox or make new document.")
self.toolBar.newAction.setShortcut("Ctrl+N")
self.toolBar.newAction.triggered.connect(self.newfile)
#second Action OpenFile
self.toolBar.secondAction = QtWidgets.QAction(QtGui.QIcon(":/ico/open.png"),"Open",self.toolBar)
self.toolBar.secondAction.setStatusTip("Create a new document from scratch.")
self.toolBar.secondAction.setShortcut("Ctrl+O")
self.toolBar.secondAction.triggered.connect(self.open)
# action 3 save file
self.toolBar.Action3 = QtWidgets.QAction(QtGui.QIcon(":/ico/save.png"),"Save",self.toolBar)
self.toolBar.Action3.setStatusTip("Save Your File.")
self.toolBar.Action3.setShortcut("Ctrl+S")
self.toolBar.Action3.triggered.connect(self.savefile)
#action 4 run file
self.toolBar.Action4 = QtWidgets.QAction(QtGui.QIcon(":/ico/run32.png"),"Run",self.toolBar)
self.toolBar.Action4.setStatusTip("Run")
self.toolBar.Action4.setShortcut("Ctrl+E")
self.toolBar.Action4.triggered.connect(self.runto)
#action 21 debug
#self.toolBar2.Action21 = QtGui.QAction(QtGui.QIcon(":/ico/run32.png"),"Debug",self.toolBar)
#self.toolBar2.Action21.setStatusTip("Debug File.")
#self.toolBar2.Action21.setShortcut("Ctrl+7")
#self.toolBar2.Action21.triggered.connect(self.debugto)
#action 6 undo
self.toolBar.Action6 = QtWidgets.QAction(QtGui.QIcon(":/ico/undo.png"),"Redo",self.toolBar)
self.toolBar.Action6.setStatusTip("Undo.")
self.toolBar.Action6.setShortcut("Ctrl+Z")
self.toolBar.Action6.triggered.connect(self.codebox.undo)
#action 7 redo
self.toolBar.Action7 = QtWidgets.QAction(QtGui.QIcon(":/ico/redo.png"),"Redo",self.toolBar)
self.toolBar.Action7.setStatusTip("Redo.")
self.toolBar.Action7.setShortcut("Ctrl+Y")
self.toolBar.Action7.triggered.connect(self.codebox.redo)
#action8 rerset Folding
self.toolBar.Action8 = QtWidgets.QAction(QtGui.QIcon(":/ico/align-justify.png"),"Reset Folding",self.toolBar)
self.toolBar.Action8.setStatusTip("Reset Folding.")
self.toolBar.Action8.setShortcut("Ctrl+R")
self.toolBar.Action8.triggered.connect(self.nofoldingl)
#actions9 CircledTreeFoldStyle
self.toolBar.Action9 = QtWidgets.QAction(QtGui.QIcon(":/ico/bullet.png"),"Circled Tree Folding",self.toolBar)
self.toolBar.Action9.setStatusTip("Circled Tree Folding.")
self.toolBar.Action9.setShortcut("Ctrl+C")
self.toolBar.Action9.triggered.connect(self.Circledfold)
#actions10 plainFoldStyle
self.toolBar.Action10 = QtWidgets.QAction(QtGui.QIcon(":/ico/number.png"),"Plain Folding",self.toolBar)
self.toolBar.Action10.setStatusTip("Plain Folding")
self.toolBar.Action10.setShortcut("Ctrl+P")
self.toolBar.Action10.triggered.connect(self.plainfold)
# fonts
self.toolBar.Action21 = QtWidgets.QAction(QtGui.QIcon(":/ico4/font.png"), "Fonts", self.toolBar)
self.toolBar.Action21.setStatusTip("Fonts")
self.toolBar.Action21.setShortcut("Ctrl+F")
self.toolBar.Action21.triggered.connect(self.font_choice)
#web baby
self.toolBar.Action11 = QtWidgets.QAction(QtGui.QIcon(":/ico/web.png"),"Hex-rays Homepage",self.toolBar)
self.toolBar.Action11.setStatusTip("Home of Hex-rays")
self.toolBar.Action11.setShortcut("Ctrl+W")
self.toolBar.Action11.triggered.connect(self.webopen)
#irc
self.toolBar.Action12 = QtWidgets.QAction(QtGui.QIcon(":/ico3/settings.png"),"Open Ida Pro Python SDK",self.toolBar)
self.toolBar.Action12.setStatusTip("Ida Pro Python SDK")
self.toolBar.Action12.setShortcut("Ctrl+I")
self.toolBar.Action12.triggered.connect(self.sdkopen)
#github Python
self.toolBar.Action14 = QtWidgets.QAction(QtGui.QIcon(":/ico/github.png"),"Open git python",self.toolBar)
self.toolBar.Action14.setStatusTip("Open git python")
self.toolBar.Action14.setShortcut("Ctrl+G")
self.toolBar.Action14.triggered.connect(self.gitopen)
#auther me :)
self.toolBar.Action15 = QtWidgets.QAction(QtGui.QIcon(":/ico/auth.png"),"Author",self.toolBar)
self.toolBar.Action15.setStatusTip("Author")
self.toolBar.Action15.setShortcut("Ctrl+B")
self.toolBar.Action15.triggered.connect(self.Author)
#toggle off code regonision
self.toolBar.Action16 = QtWidgets.QAction(QtGui.QIcon(":/ico2/pythonminus.png"),"Disable Code recognition",self.toolBar)
self.toolBar.Action16.setStatusTip("Disable Code recognition")
self.toolBar.Action16.setShortcut("Alt+D")
self.toolBar.Action16.triggered.connect(self.Diablecode)
#toogle on
self.toolBar.Action17 = QtWidgets.QAction(QtGui.QIcon(":/ico2/pypluss.png"),"Enable Code recognition",self.toolBar)
self.toolBar.Action17.setStatusTip("Enable Code recognition")
self.toolBar.Action17.setShortcut("Alt+E")
self.toolBar.Action17.triggered.connect(self.Reiablecode)
# zoom in
self.toolBar.Action18 = QtWidgets.QAction(QtGui.QIcon(":/ico3/in.png"),"Zoom In",self.toolBar)
self.toolBar.Action18.setStatusTip("Zoom In")
self.toolBar.Action18.setShortcut("CTRL+SHIFT++")
self.toolBar.Action18.triggered.connect(self.udder)
#zoom out
self.toolBar.Action19 = QtWidgets.QAction(QtGui.QIcon(":/ico3/out.png"),"Zoom Out",self.toolBar)
self.toolBar.Action19.setStatusTip("Zoom Out")
self.toolBar.Action19.setShortcut("CTRL+SHIFT+-")
self.toolBar.Action19.triggered.connect(self.odder)
self.toolBar.Action20 = QtWidgets.QAction(QtGui.QIcon(":/ico3/10.png"),"Profile Code",self.toolBar)
self.toolBar.Action20.setStatusTip("Profile Code")
self.toolBar.Action20.setShortcut("CTRL+SHIFT+E")
self.toolBar.Action20.triggered.connect(self.runtoprob)
#PLUGINS HERE WE GO
self.toolBar.Action22 = QtWidgets.QAction(QtGui.QIcon(":/ico5/plugin.png"),"Plugin",self.toolBar)
self.toolBar.Action22.setStatusTip("Make plugin")
self.toolBar.Action22.setShortcut("")
self.toolBar.Action22.triggered.connect(self.plugin_make)
self.scriptfile = self.codebox.text()
self.filename = ""
#actions
self.toolBar.addAction(self.toolBar.newAction)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.secondAction)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action3)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action4)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action6)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action7)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action8)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action9)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action10)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action21)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action11)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action12)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action14)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action15)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action16)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action17)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action18)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action19)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action20)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action21)
self.toolBar.addSeparator()
self.toolBar.addAction(self.toolBar.Action22)
self.skrift = QFont()
self.skrift.setFamily('Consolas')
self.skrift.setFixedPitch(True)
self.skrift.setPointSize(12)
self.codebox.setFont(self.skrift)
#python style
self.lexer = QsciLexerPython(self.codebox)
self.lexer.setFont(self.skrift)
self.lexer.setEolFill(True)
#api test not working
api = Qsci.QsciAPIs(self.lexer)
API_FILE = dn+'\\Python.api'
API_FILE2 = dn+'\\idc.api'
API_FILE3 = dn+'\\idaapi.api'
api.load(API_FILE)
api.load(API_FILE2)
api.load(API_FILE3)
api.prepare()
self.codebox.setAutoCompletionThreshold(0)
self.codebox.setAutoCompletionThreshold(6)
self.codebox.setAutoCompletionThreshold(8)
self.codebox.setAutoCompletionSource(Qsci.QsciScintilla.AcsAPIs)
self.lexer.setDefaultFont(self.skrift)
self.codebox.setLexer(self.lexer)
self.codebox.SendScintilla(QsciScintilla.SCI_STYLESETFONT, 1, 'Consolas')
#line numbers
fontmetrics = QFontMetrics(self.skrift)
self.codebox.setMarginsFont(self.skrift)
self.codebox.setMarginWidth(0, fontmetrics.width("00000") + 6)
self.codebox.setTabWidth(4)
#brace
self.codebox.setBraceMatching(QsciScintilla.SloppyBraceMatch)
#auto line tab =4
self.codebox.setAutoIndent(True)
#scroolbar
self.codebox.SendScintilla(QsciScintilla.SCI_SETHSCROLLBAR, 1)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "Ida Pro Python Script Editor", None))
self.toolBar.setWindowTitle(_translate("MainWindow", "toolBar", None))
def plugin_make(self):
Wizard.show()
def sendgrapped(self):
print "hello"
helloclass = Ui_Wizard()
self.bsout = self.codebox.text()
helloclass.script_textEdit.setText(self.bsout)
def hubba(self):
print "sdfgsdgsgdghsghdg"
#print str(self.codebox.text())
def udder(self):
self.codebox.zoomIn()
def odder(self):
self.codebox.zoomOut()
def newfile(self):
self.codebox.clear()
def open(self):
self.path = QtCore.QFileInfo(self.filename).path()
# Get filename and show only .writer files
(self.filename, _) = \
QtWidgets.QFileDialog.getOpenFileName(self.vindu,
'Open File', self.path,
'Python Files (*.py *.pyc *.pyw)', '')
if self.filename:
with open(self.filename, 'r') as self.file:
self.codebox.setText(self.file.read())
os.chdir(str(self.path))
def savefile(self):
self.path = QtCore.QFileInfo(self.filename).path()
(self.filename, _) = \
QtWidgets.QFileDialog.getSaveFileName(self.vindu, 'Save as'
, self.path, 'Python Files (*.py *.pyc *.pyw)')
if self.filename:
self.savetext(self.filename)
os.chdir(str(self.path))
def savetext(self, fileName):
textout = self.codebox.text()
file = QtCore.QFile(fileName)
if file.open(QtCore.QIODevice.WriteOnly):
QtCore.QTextStream(file) << textout
else:
QtWidgets.QMessageBox.information(self.vindu,
'Unable to open file', file.errorString())
os.chdir(str(self.path))
def runto(self):
self.path = QtCore.QFileInfo(self.filename).path()
g = globals()
os.chdir(str(self.path))
script = str(self.codebox.text())
try:
os.chdir(str(self.path))
os.path.join(os.path.expanduser('~'), os.path.expandvars(str(self.path)))
sys.path.insert(0, str(self.path))
exec (script, g)
except Exception as e:
print e.__doc__
print e.message
else:
pass
#exec (script, g)
def runtoprob(self):
try:
self.path = QtCore.QFileInfo(self.filename).path()
self.path = QtCore.QFileInfo(self.filename).path()
g = globals()
os.chdir(str(self.path))
script = str(self.codebox.text())
import cProfile
cProfile.run(script)
except Exception as e:
print e.__doc__
print e.message
else:
import cProfile
cProfile.run(script)
def Diablecode(self):
self.codebox.setAutoCompletionSource(Qsci.QsciScintilla.AcsNone)
def Reiablecode(self):
self.codebox.setAutoCompletionSource(Qsci.QsciScintilla.AcsAPIs)
def nofoldingl(self):
self.codebox.setFolding(QsciScintilla.NoFoldStyle)
def Circledfold(self):
self.codebox.setFolding(QsciScintilla.CircledTreeFoldStyle)
def plainfold(self):
self.codebox.setFolding(QsciScintilla.PlainFoldStyle)
def webopen(self):
import webbrowser
webbrowser.open('https://www.hex-rays.com/')
def sdkopen(self):
import webbrowser
webbrowser.open('https://www.hex-rays.com/products/ida/support/idapython_docs/')
def gitopen(self):
import webbrowser
webbrowser.open('https://github.com/idapython/src/tree/build-1.7.2')
def Author(self):
import webbrowser
webbrowser.open('https://github.com/techbliss')
def font_choice(self):
self.lbl = self.lexer
font, ok = QtWidgets.QFontDialog.getFont()
if ok:
self.lbl.setFont(font)
def on_margin_clicked(self, nmargin, nline, modifiers):
# Toggle marker for the line the margin was clicked on
if self.codebox.markersAtLine(nline) != 0:
self.codebox.markerDelete(nline, self.ARROW_MARKER_NUM)
else:
self.codebox.markerAdd(nline, self.ARROW_MARKER_NUM)
class MyWindow(QtWidgets.QMainWindow):
'''
we have to ask user for quiting so we can change back to root dir
'''
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Exit',
"Are you sure to quit?", QMessageBox.Yes |
QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
# print dn
os.chdir(dn)
# print dn
#os.chdir('../..')
# print dn
print '''
###################################################
# Author Storm Shadow #
# #
# Follow me on twitter #
# @zadow28 #
###################################################
# Ida pro python Editor #
###################################################
'''
event.accept()
os.chdir(dn)
else:
event.ignore()
os.chdir(dn)
from PyQt5 import Qsci
if __name__ == '__main__':
import sys
Wizard = QtWidgets.QWizard()
#Wizard = QtWidgets.QWizard()
#app = QtWidgets.QApplication.instance() # enable for usage outside
#if not app: # enable for usage outside
# app = QtWidgets.QApplication([]) # enable for usage outside
MainWindow = MyWindow()
ui = Ui_MainWindow()
messageformForm = QtWidgets.QWidget()
ui2 = Ui_Wizard()
ui3 = Ui_messageformForm()
ui3.setupUi1(messageformForm)
MainWindow.resize(1000, 600)
MainWindow.show()
# app.exec_()
| unlicense | -4,438,855,489,847,117,000 | 42.621421 | 338 | 0.644755 | false |
tensorflow/tfx | tfx/orchestration/portable/cache_utils_test.py | 1 | 11730 | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.orchestration.portable.cache_utils."""
import os
import tensorflow as tf
from tfx.dsl.io import fileio
from tfx.orchestration import metadata
from tfx.orchestration.portable import cache_utils
from tfx.orchestration.portable import execution_publish_utils
from tfx.orchestration.portable.mlmd import context_lib
from tfx.proto.orchestration import executable_spec_pb2
from tfx.proto.orchestration import pipeline_pb2
from tfx.types import standard_artifacts
from tfx.utils import test_case_utils
from google.protobuf import text_format
from ml_metadata.proto import metadata_store_pb2
class CacheUtilsTest(test_case_utils.TfxTest):
def setUp(self):
super().setUp()
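    # An empty sqlite ConnectionConfig points MLMD at a fresh in-memory
    # database, so every test starts with an empty metadata store.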
self._connection_config = metadata_store_pb2.ConnectionConfig()
self._connection_config.sqlite.SetInParent()
self._module_file_path = os.path.join(self.tmp_dir, 'module_file')
self._input_artifacts = {'input_examples': [standard_artifacts.Examples()]}
self._output_artifacts = {'output_models': [standard_artifacts.Model()]}
self._parameters = {'module_file': self._module_file_path}
self._module_file_content = 'module content'
self._pipeline_node = text_format.Parse(
"""
node_info {
id: "my_id"
}
""", pipeline_pb2.PipelineNode())
self._pipeline_info = pipeline_pb2.PipelineInfo(id='pipeline_id')
self._executor_spec = text_format.Parse(
"""
class_path: "my.class.path"
""", executable_spec_pb2.PythonClassExecutableSpec())
def _get_cache_context(self,
metadata_handler,
custom_pipeline_node=None,
custom_pipeline_info=None,
executor_spec=None,
custom_input_artifacts=None,
custom_output_artifacts=None,
custom_parameters=None,
custom_module_content=None):
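    """Writes the module file and registers a cache context in MLMD.

    Every `custom_*` argument (and `executor_spec`) overrides the matching
    fixture from `setUp`, letting each test vary exactly one cache-key field.
    """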
with fileio.open(self._module_file_path, 'w+') as f:
f.write(custom_module_content or self._module_file_content)
return cache_utils.get_cache_context(
metadata_handler,
custom_pipeline_node or self._pipeline_node,
custom_pipeline_info or self._pipeline_info,
executor_spec=(executor_spec or self._executor_spec),
input_artifacts=(custom_input_artifacts or self._input_artifacts),
output_artifacts=(custom_output_artifacts or self._output_artifacts),
parameters=(custom_parameters or self._parameters))
def testGetCacheContext(self):
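    """A registered cache context should be retrievable from the MLMD store."""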
with metadata.Metadata(connection_config=self._connection_config) as m:
cache_context = self._get_cache_context(m)
[context_from_mlmd] = m.store.get_contexts()
self.assertProtoPartiallyEquals(
cache_context,
context_from_mlmd,
ignored_fields=[
'create_time_since_epoch', 'last_update_time_since_epoch'
])
def testGetCacheContextTwiceSameArgs(self):
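    """Calling twice with identical arguments should reuse one cache context."""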
with metadata.Metadata(connection_config=self._connection_config) as m:
self._get_cache_context(m)
self._get_cache_context(m)
# Same args should not create a new cache context.
self.assertLen(m.store.get_contexts(), 1)
def testGetCacheContextTwiceDifferentOutputUri(self):
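    """Changing only an output artifact's URI should not change the cache key."""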
with metadata.Metadata(connection_config=self._connection_config) as m:
self._get_cache_context(m)
output_model_different_uri = standard_artifacts.Model()
output_model_different_uri.uri = 'diff_uri'
self._get_cache_context(
m,
custom_output_artifacts={
'output_models': [output_model_different_uri]
})
      # A different output URI alone should not create a new cache context.
self.assertLen(m.store.get_contexts(), 1)
def testGetCacheContextTwiceDifferentOutputs(self):
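    """A different output skeleton should produce a new cache context."""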
with metadata.Metadata(connection_config=self._connection_config) as m:
self._get_cache_context(m)
self._get_cache_context(
m, custom_output_artifacts={'k': [standard_artifacts.Model()]})
# Different output skeleton will result in a new cache context.
self.assertLen(m.store.get_contexts(), 2)
def testGetCacheContextTwiceDifferentInputs(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
self._get_cache_context(m)
self._get_cache_context(
m, custom_input_artifacts={'k': [standard_artifacts.Examples(),]})
      # Different input artifacts will result in a new cache context.
self.assertLen(m.store.get_contexts(), 2)
def testGetCacheContextTwiceDifferentParameters(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
self._get_cache_context(m)
self._get_cache_context(m, custom_parameters={'new_prop': 'value'})
      # Different parameters will result in a new cache context.
self.assertLen(m.store.get_contexts(), 2)
def testGetCacheContextTwiceDifferentModuleContent(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
self._get_cache_context(m)
self._get_cache_context(m, custom_module_content='new module content')
      # Different module file content will result in a new cache context.
self.assertLen(m.store.get_contexts(), 2)
def testGetCacheContextTwiceDifferentPipelineInfo(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
self._get_cache_context(m)
self._get_cache_context(
m, custom_pipeline_info=pipeline_pb2.PipelineInfo(id='new_id'))
      # Different pipeline info will result in a new cache context.
self.assertLen(m.store.get_contexts(), 2)
def testGetCacheContextTwiceDifferentNodeInfo(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
self._get_cache_context(m)
self._get_cache_context(
m,
custom_pipeline_node=text_format.Parse(
"""
node_info {
id: "new_node_id"
}
""", pipeline_pb2.PipelineNode()))
      # Different node info will result in a new cache context.
self.assertLen(m.store.get_contexts(), 2)
def testGetCacheContextTwiceDifferentExecutorSpec(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
self._get_cache_context(m)
self._get_cache_context(
m,
executor_spec=text_format.Parse(
"""
class_path: "new.class.path"
""", executable_spec_pb2.PythonClassExecutableSpec()))
      # Different executor spec will result in a new cache context.
self.assertLen(m.store.get_contexts(), 2)
def testGetCachedOutputArtifacts(self):
# Output artifacts that will be used by the first execution with the same
# cache key.
output_model_one = standard_artifacts.Model()
output_model_one.uri = 'model_one'
output_model_two = standard_artifacts.Model()
output_model_two.uri = 'model_two'
output_example_one = standard_artifacts.Examples()
output_example_one.uri = 'example_one'
# Output artifacts that will be used by the second execution with the same
# cache key.
output_model_three = standard_artifacts.Model()
output_model_three.uri = 'model_three'
output_model_four = standard_artifacts.Model()
output_model_four.uri = 'model_four'
output_example_two = standard_artifacts.Examples()
output_example_two.uri = 'example_two'
output_models_key = 'output_models'
output_examples_key = 'output_examples'
with metadata.Metadata(connection_config=self._connection_config) as m:
cache_context = context_lib.register_context_if_not_exists(
m, context_lib.CONTEXT_TYPE_EXECUTION_CACHE, 'cache_key')
cached_output = cache_utils.get_cached_outputs(m, cache_context)
      # No succeeded execution is associated with this context yet, so the
      # cached output is None.
self.assertIsNone(cached_output)
execution_one = execution_publish_utils.register_execution(
m, metadata_store_pb2.ExecutionType(name='my_type'), [cache_context])
execution_publish_utils.publish_succeeded_execution(
m,
execution_one.id, [cache_context],
output_artifacts={
output_models_key: [output_model_one, output_model_two],
output_examples_key: [output_example_one]
})
execution_two = execution_publish_utils.register_execution(
m, metadata_store_pb2.ExecutionType(name='my_type'), [cache_context])
output_artifacts = execution_publish_utils.publish_succeeded_execution(
m,
execution_two.id, [cache_context],
output_artifacts={
output_models_key: [output_model_three, output_model_four],
output_examples_key: [output_example_two]
})
      # The cached output should be the artifacts produced by the most
      # recent execution under the given cache context.
cached_output = cache_utils.get_cached_outputs(m, cache_context)
self.assertLen(cached_output, 2)
self.assertLen(cached_output[output_models_key], 2)
self.assertLen(cached_output[output_examples_key], 1)
self.assertProtoPartiallyEquals(
cached_output[output_models_key][0].mlmd_artifact,
output_artifacts[output_models_key][0].mlmd_artifact,
ignored_fields=[
'create_time_since_epoch', 'last_update_time_since_epoch'
])
self.assertProtoPartiallyEquals(
cached_output[output_models_key][1].mlmd_artifact,
output_artifacts[output_models_key][1].mlmd_artifact,
ignored_fields=[
'create_time_since_epoch', 'last_update_time_since_epoch'
])
self.assertProtoPartiallyEquals(
cached_output[output_examples_key][0].mlmd_artifact,
output_artifacts[output_examples_key][0].mlmd_artifact,
ignored_fields=[
'create_time_since_epoch', 'last_update_time_since_epoch'
])
  def testGetCachedOutputArtifactsForNodesWithNoOutput(self):
with metadata.Metadata(connection_config=self._connection_config) as m:
cache_context = context_lib.register_context_if_not_exists(
m, context_lib.CONTEXT_TYPE_EXECUTION_CACHE, 'cache_key')
cached_output = cache_utils.get_cached_outputs(m, cache_context)
      # No succeeded execution is associated with this context yet, so the
      # cached output is None.
self.assertIsNone(cached_output)
execution_one = execution_publish_utils.register_execution(
m, metadata_store_pb2.ExecutionType(name='my_type'), [cache_context])
execution_publish_utils.publish_succeeded_execution(
m,
execution_one.id, [cache_context])
cached_output = cache_utils.get_cached_outputs(m, cache_context)
      # A succeeded execution is associated with this context, so the cached
      # output is not None but an empty dict.
self.assertIsNotNone(cached_output)
self.assertEmpty(cached_output)
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | -1,340,563,954,559,667,500 | 44.289575 | 79 | 0.675959 | false |
rentlytics/django-zerodowntime | zerodowntime/management/commands/install_git_hooks.py | 1 | 1517 | import os
import stat
from django.core.management import BaseCommand
COMMIT_MSG_HOOK = r"""
# BEGIN ZERODOWNTIME_COMMIT_MSG_HOOK
commit_regex='(\[allow\-unsafe\-migrations]|merge)'
if ! grep -iqE "$commit_regex" "$1"; then
source ./venv/bin/activate
./manage.py check_migrations
migration_check=$?
if [ $migration_check != 0 ]; then
echo "Aborting commit, caused by migrations incompatible with ZDCD." >&2
echo "To skip this check you can add '[allow-unsafe-migrations]' to your commit message." >&2
exit $migration_check
fi;
fi;
# END ZERODOWNTIME_COMMIT_MSG_HOOK
"""
class Command(BaseCommand):
help = 'Installs a git commit-msg hook which will ' \
'execute `./manage.py check_migrations` unless ' \
'the commit message contains "[allow-unsafe-migrations]"'
HOOK_PATH = '.git/hooks/'
def handle(self, *args, **options):
commit_msg_path = os.path.join(self.HOOK_PATH, 'commit-msg')
hook_exists = os.path.exists(commit_msg_path)
if hook_exists:
with open(commit_msg_path, 'r') as fp:
hook_content = fp.read()
else:
hook_content = '#!/usr/bin/env bash\n\n'
if 'ZERODOWNTIME_COMMIT_MSG_HOOK' not in hook_content:
hook_content += COMMIT_MSG_HOOK
with open(commit_msg_path, 'w') as fp:
fp.write(hook_content)
st = os.stat(commit_msg_path)
os.chmod(commit_msg_path, st.st_mode | stat.S_IEXEC)
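# Usage sketch (assumes this app is registered in INSTALLED_APPS):
#   ./manage.py install_git_hooks
# Re-running is safe: the ZERODOWNTIME_COMMIT_MSG_HOOK marker check above
# keeps the hook body from being appended twice.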
| isc | -4,212,104,457,324,079,600 | 28.745098 | 99 | 0.616348 | false |
RazerM/pg_grant | tests/conftest.py | 1 | 2475 | from pathlib import Path
import pytest
import testing.postgresql
from sqlalchemy import create_engine, text
from sqlalchemy.engine.url import make_url
from testcontainers.postgres import PostgresContainer as _PostgresContainer
tests_dir = Path(__file__).parents[0].resolve()
test_schema_file = Path(tests_dir, 'data', 'test-schema.sql')
SUPERUSER_NAME = 'alice'
DB_NAME = 'db1'
Postgresql = testing.postgresql.PostgresqlFactory(
initdb_args='-U postgres -A trust',
database=DB_NAME,
)
class PostgresContainer(_PostgresContainer):
POSTGRES_USER = 'postgres'
POSTGRES_DB = DB_NAME
def pytest_addoption(parser):
parser.addoption(
'--no-container', action='store_true',
help='Use temporary PostgreSQL cluster without a container.')
def pytest_runtest_setup(item):
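    # Tests marked with @pytest.mark.nocontainer are skipped unless the
    # --no-container flag was passed.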
if 'nocontainer' in item.keywords and not item.config.getoption('--no-container'):
pytest.skip('Use --no-container to execute this test.')
@pytest.fixture(scope='session')
def postgres_url(request):
no_container = request.config.getoption("--no-container")
if no_container:
postgresql = Postgresql()
# Use superuser to create new superuser, then yield new connection URL
url = make_url(postgresql.url())
engine = create_engine(url)
engine.execute('CREATE ROLE {} WITH SUPERUSER LOGIN'.format(SUPERUSER_NAME))
engine.dispose()
url.username = SUPERUSER_NAME
yield str(url)
else:
postgres_container = PostgresContainer("postgres:latest")
with postgres_container as postgres:
# Use superuser to create new superuser, then yield new connection URL
url = make_url(postgres.get_connection_url())
engine = create_engine(url)
engine.execute(
text(
'CREATE ROLE {} WITH SUPERUSER LOGIN PASSWORD '
':password'.format(SUPERUSER_NAME)
),
password=postgres_container.POSTGRES_PASSWORD,
)
engine.dispose()
url.username = SUPERUSER_NAME
yield str(url)
@pytest.fixture(scope='session')
def engine(postgres_url):
return create_engine(postgres_url)
@pytest.fixture(scope='session')
def pg_schema(engine):
with test_schema_file.open() as fp:
engine.execute(fp.read())
@pytest.fixture
def connection(engine, pg_schema):
with engine.connect() as conn:
yield conn
| mit | 4,059,386,316,065,879,600 | 28.117647 | 86 | 0.657778 | false |
tensorflow/addons | tensorflow_addons/image/tests/transform_ops_test.py | 1 | 16494 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for transform ops."""
from distutils.version import LooseVersion
import pytest
import numpy as np
import tensorflow as tf
from skimage import transform
from tensorflow_addons.image import transform_ops
from tensorflow_addons.utils import test_utils
_DTYPES = {
tf.dtypes.uint8,
tf.dtypes.int32,
tf.dtypes.int64,
tf.dtypes.float16,
tf.dtypes.float32,
tf.dtypes.float64,
}
@pytest.mark.with_device(["cpu", "gpu"])
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES)
def test_compose(dtype):
image = tf.constant(
[[1, 1, 1, 0], [1, 0, 0, 0], [1, 1, 1, 0], [0, 0, 0, 0]], dtype=dtype
)
# Rotate counter-clockwise by pi / 2.
rotation = transform_ops.angles_to_projective_transforms(np.pi / 2, 4, 4)
# Translate right by 1 (the transformation matrix is always inverted,
# hence the -1).
translation = tf.constant([1, 0, -1, 0, 1, 0, 0, 0], dtype=tf.dtypes.float32)
composed = transform_ops.compose_transforms([rotation, translation])
image_transformed = transform_ops.transform(image, composed)
np.testing.assert_equal(
[[0, 0, 0, 0], [0, 1, 0, 1], [0, 1, 0, 1], [0, 1, 1, 1]],
image_transformed.numpy(),
)
@pytest.mark.with_device(["cpu", "gpu"])
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES)
def test_extreme_projective_transform(dtype):
image = tf.constant(
[[1, 0, 1, 0], [0, 1, 0, 1], [1, 0, 1, 0], [0, 1, 0, 1]], dtype=dtype
)
transformation = tf.constant([1, 0, 0, 0, 1, 0, -1, 0], tf.dtypes.float32)
image_transformed = transform_ops.transform(image, transformation)
np.testing.assert_equal(
[[1, 0, 0, 0], [0, 0, 0, 0], [1, 0, 0, 0], [0, 0, 0, 0]],
image_transformed.numpy(),
)
@pytest.mark.with_device(["cpu", "gpu"])
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES)
@pytest.mark.parametrize("fill_value", [0.0, 1.0])
def test_transform_constant_fill_mode(dtype, fill_value):
if fill_value != 0.0 and LooseVersion(tf.__version__) < LooseVersion("2.4.0"):
pytest.skip("Nonzero fill_value is not supported for TensorFlow < 2.4.0.")
image = tf.constant(
[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]], dtype=dtype
)
expected = np.asarray(
[
[fill_value, 0, 1, 2],
[fill_value, 4, 5, 6],
[fill_value, 8, 9, 10],
[fill_value, 12, 13, 14],
],
dtype=dtype.as_numpy_dtype,
)
# Translate right by 1 (the transformation matrix is always inverted,
# hence the -1).
translation = tf.constant([1, 0, -1, 0, 1, 0, 0, 0], dtype=tf.float32)
image_transformed = transform_ops.transform(
image,
translation,
fill_mode="constant",
fill_value=fill_value,
)
np.testing.assert_equal(image_transformed.numpy(), expected)
@pytest.mark.with_device(["cpu", "gpu"])
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES)
def test_transform_reflect_fill_mode(dtype):
image = tf.constant(
[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]], dtype=dtype
)
expected = np.asarray(
[[0, 0, 1, 2], [4, 4, 5, 6], [8, 8, 9, 10], [12, 12, 13, 14]],
dtype=dtype.as_numpy_dtype,
)
# Translate right by 1 (the transformation matrix is always inverted,
# hence the -1).
translation = tf.constant([1, 0, -1, 0, 1, 0, 0, 0], dtype=tf.float32)
image_transformed = transform_ops.transform(image, translation, fill_mode="reflect")
np.testing.assert_equal(image_transformed.numpy(), expected)
@pytest.mark.with_device(["cpu", "gpu"])
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES)
def test_transform_wrap_fill_mode(dtype):
image = tf.constant(
[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]], dtype=dtype
)
expected = np.asarray(
[[3, 0, 1, 2], [7, 4, 5, 6], [11, 8, 9, 10], [15, 12, 13, 14]],
dtype=dtype.as_numpy_dtype,
)
# Translate right by 1 (the transformation matrix is always inverted,
# hence the -1).
translation = tf.constant([1, 0, -1, 0, 1, 0, 0, 0], dtype=tf.float32)
image_transformed = transform_ops.transform(image, translation, fill_mode="wrap")
np.testing.assert_equal(image_transformed.numpy(), expected)
@pytest.mark.skipif(
LooseVersion(tf.__version__) < LooseVersion("2.4.0"),
reason="NEAREST fill mode is not supported for TensorFlow < 2.4.0.",
)
@pytest.mark.with_device(["cpu", "gpu"])
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES)
def test_transform_nearest_fill_mode(dtype):
image = tf.constant(
[[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]], dtype=dtype
)
expected = np.asarray(
[[0, 0, 0, 1], [4, 4, 4, 5], [8, 8, 8, 9], [12, 12, 12, 13]],
dtype=dtype.as_numpy_dtype,
)
# Translate right by 2 (the transformation matrix is always inverted,
# hence the -2).
translation = tf.constant([1, 0, -2, 0, 1, 0, 0, 0], dtype=tf.float32)
image_transformed = transform_ops.transform(image, translation, fill_mode="nearest")
np.testing.assert_equal(image_transformed.numpy(), expected)
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
def test_transform_static_output_shape():
image = tf.constant([[1.0, 2.0], [3.0, 4.0]])
result = transform_ops.transform(
image, tf.random.uniform([8], -1, 1), output_shape=[3, 5]
)
np.testing.assert_equal([3, 5], result.shape)
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
def test_transform_unknown_shape():
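    # Tracing with an unspecified TensorSpec yields one concrete function
    # that accepts 2-D, 3-D and 4-D inputs; the identity transform
    # [1, 0, 0, 0, 1, 0, 0, 0] should return each input unchanged.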
fn = tf.function(transform_ops.transform).get_concrete_function(
tf.TensorSpec(shape=None, dtype=tf.float32), [1, 0, 0, 0, 1, 0, 0, 0]
)
for shape in (2, 4), (2, 4, 3), (1, 2, 4, 3):
image = tf.ones(shape=shape)
np.testing.assert_equal(image.numpy(), fn(image).numpy())
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
def _test_grad(input_shape, output_shape=None):
image_size = tf.math.cumprod(input_shape)[-1]
image_size = tf.cast(image_size, tf.float32)
test_image = tf.reshape(tf.range(0, image_size, dtype=tf.float32), input_shape)
# Scale test image to range [0, 0.01]
test_image = (test_image / image_size) * 0.01
def transform_fn(x):
x.set_shape(input_shape)
        projective_transform = transform_ops.angles_to_projective_transforms(
            np.pi / 2, 4, 4
        )
        return transform_ops.transform(
            images=x, transforms=projective_transform, output_shape=output_shape
        )
theoretical, numerical = tf.test.compute_gradient(transform_fn, [test_image])
np.testing.assert_almost_equal(theoretical[0], numerical[0], decimal=6)
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
def test_grad():
_test_grad([8, 8])
_test_grad([8, 8], [4, 4])
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES)
def test_transform_data_types(dtype):
image = tf.constant([[1, 2], [3, 4]], dtype=dtype)
np.testing.assert_equal(
np.array([[4, 4], [4, 4]]).astype(dtype.as_numpy_dtype),
transform_ops.transform(image, [1] * 8),
)
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
def test_transform_eager():
image = tf.constant([[1.0, 2.0], [3.0, 4.0]])
np.testing.assert_equal(
np.array([[4, 4], [4, 4]]), transform_ops.transform(image, [1] * 8)
)
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES)
def test_zeros(dtype):
for shape in [(5, 5), (24, 24), (2, 24, 24, 3)]:
for angle in [0, 1, np.pi / 2.0]:
image = tf.zeros(shape, dtype)
np.testing.assert_equal(
transform_ops.rotate(image, angle),
np.zeros(shape, dtype.as_numpy_dtype),
)
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES)
def test_rotate_even(dtype):
image = tf.reshape(tf.cast(tf.range(36), dtype), (6, 6))
image_rep = tf.tile(image[None, :, :, None], [3, 1, 1, 1])
angles = tf.constant([0.0, np.pi / 4.0, np.pi / 2.0], tf.float32)
image_rotated = transform_ops.rotate(image_rep, angles)
np.testing.assert_equal(
image_rotated.numpy()[:, :, :, 0],
[
[
[0, 1, 2, 3, 4, 5],
[6, 7, 8, 9, 10, 11],
[12, 13, 14, 15, 16, 17],
[18, 19, 20, 21, 22, 23],
[24, 25, 26, 27, 28, 29],
[30, 31, 32, 33, 34, 35],
],
[
[0, 3, 4, 11, 17, 0],
[2, 3, 9, 16, 23, 23],
[1, 8, 15, 21, 22, 29],
[6, 13, 20, 21, 27, 34],
[12, 18, 19, 26, 33, 33],
[0, 18, 24, 31, 32, 0],
],
[
[5, 11, 17, 23, 29, 35],
[4, 10, 16, 22, 28, 34],
[3, 9, 15, 21, 27, 33],
[2, 8, 14, 20, 26, 32],
[1, 7, 13, 19, 25, 31],
[0, 6, 12, 18, 24, 30],
],
],
)
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES)
def test_rotate_odd(dtype):
image = tf.reshape(tf.cast(tf.range(25), dtype), (5, 5))
image_rep = tf.tile(image[None, :, :, None], [3, 1, 1, 1])
angles = tf.constant([np.pi / 4.0, 1.0, -np.pi / 2.0], tf.float32)
image_rotated = transform_ops.rotate(image_rep, angles)
np.testing.assert_equal(
image_rotated.numpy()[:, :, :, 0],
[
[
[0, 3, 8, 9, 0],
[1, 7, 8, 13, 19],
[6, 6, 12, 18, 18],
[5, 11, 16, 17, 23],
[0, 15, 16, 21, 0],
],
[
[0, 3, 9, 14, 0],
[2, 7, 8, 13, 19],
[1, 6, 12, 18, 23],
[5, 11, 16, 17, 22],
[0, 10, 15, 21, 0],
],
[
[20, 15, 10, 5, 0],
[21, 16, 11, 6, 1],
[22, 17, 12, 7, 2],
[23, 18, 13, 8, 3],
[24, 19, 14, 9, 4],
],
],
)
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES)
def test_compose_rotate(dtype):
image = tf.constant(
[[1, 1, 1, 0], [1, 0, 0, 0], [1, 1, 1, 0], [0, 0, 0, 0]], dtype=dtype
)
# Rotate counter-clockwise by pi / 2.
rotation = transform_ops.angles_to_projective_transforms(np.pi / 2, 4, 4)
# Translate right by 1 (the transformation matrix is always inverted,
# hence the -1).
translation = tf.constant([1, 0, -1, 0, 1, 0, 0, 0], dtype=tf.float32)
composed = transform_ops.compose_transforms([rotation, translation])
image_transformed = transform_ops.transform(image, composed)
np.testing.assert_equal(
image_transformed.numpy(),
[[0, 0, 0, 0], [0, 1, 0, 1], [0, 1, 0, 1], [0, 1, 1, 1]],
)
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
def test_bilinear():
image = tf.constant(
[
[0, 0, 0, 0, 0],
[0, 1, 1, 1, 0],
[0, 1, 0, 1, 0],
[0, 1, 1, 1, 0],
[0, 0, 0, 0, 0],
],
tf.float32,
)
# The following result matches:
# >>> scipy.ndimage.rotate(image, 45, order=1, reshape=False)
# which uses spline interpolation of order 1, equivalent to bilinear
# interpolation.
transformed = transform_ops.rotate(image, np.pi / 4.0, interpolation="BILINEAR")
np.testing.assert_allclose(
transformed.numpy(),
[
[0.000, 0.000, 0.343, 0.000, 0.000],
[0.000, 0.586, 0.914, 0.586, 0.000],
[0.343, 0.914, 0.000, 0.914, 0.343],
[0.000, 0.586, 0.914, 0.586, 0.000],
[0.000, 0.000, 0.343, 0.000, 0.000],
],
atol=0.001,
)
transformed = transform_ops.rotate(image, np.pi / 4.0, interpolation="NEAREST")
np.testing.assert_allclose(
transformed.numpy(),
[
[0, 0, 1, 0, 0],
[0, 1, 1, 1, 0],
[1, 1, 0, 1, 1],
[0, 1, 1, 1, 0],
[0, 0, 1, 0, 0],
],
)
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
def test_bilinear_uint8():
image = tf.constant(
np.asarray(
[
[0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 255, 255, 255, 0.0],
[0.0, 255, 0.0, 255, 0.0],
[0.0, 255, 255, 255, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0],
],
np.uint8,
),
tf.uint8,
)
    # The expected values equal np.rint(255 * expected bilinear image from test_bilinear).
transformed = transform_ops.rotate(image, np.pi / 4.0, interpolation="BILINEAR")
np.testing.assert_equal(
transformed.numpy(),
[
[0.0, 0.0, 87.0, 0.0, 0.0],
[0.0, 149, 233, 149, 0.0],
[87.0, 233, 0.0, 233, 87.0],
[0.0, 149, 233, 149, 0.0],
[0.0, 0.0, 87.0, 0.0, 0.0],
],
)
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
def test_rotate_static_shape():
image = tf.linalg.diag([1.0, 2.0, 3.0])
result = transform_ops.rotate(
image, tf.random.uniform((), -1, 1), interpolation="BILINEAR"
)
np.testing.assert_equal(image.get_shape(), result.get_shape())
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
def test_unknown_shape():
fn = tf.function(transform_ops.rotate).get_concrete_function(
tf.TensorSpec(shape=None, dtype=tf.float32), 0
)
for shape in (2, 4), (2, 4, 3), (1, 2, 4, 3):
image = tf.ones(shape=shape)
np.testing.assert_equal(image.numpy(), fn(image).numpy())
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES - {tf.dtypes.float16})
def test_shear_x(dtype):
image = np.random.randint(low=0, high=255, size=(4, 4, 3)).astype(
dtype.as_numpy_dtype
)
color = tf.constant([255, 0, 255], tf.int32)
level = tf.random.uniform(shape=(), minval=0, maxval=1)
tf_image = tf.constant(image)
sheared_img = transform_ops.shear_x(tf_image, level, replace=color)
transform_matrix = transform.AffineTransform(
np.array([[1, level.numpy(), 0], [0, 1, 0], [0, 0, 1]])
)
expected_img = transform.warp(
image, transform_matrix, order=0, cval=-1, preserve_range=True
)
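    # skimage filled pixels mapped from outside the input with the sentinel
    # cval=-1; paint those pixels with the replacement color to mirror
    # shear_x's replace behavior.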
mask = np.where(expected_img == -1)
expected_img[mask[0], mask[1], :] = color
np.testing.assert_equal(sheared_img.numpy(), expected_img)
@pytest.mark.usefixtures("maybe_run_functions_eagerly")
@pytest.mark.parametrize("dtype", _DTYPES - {tf.dtypes.float16})
def test_shear_y(dtype):
image = np.random.randint(low=0, high=255, size=(4, 4, 3)).astype(
dtype.as_numpy_dtype
)
color = tf.constant([255, 0, 255], tf.int32)
level = tf.random.uniform(shape=(), minval=0, maxval=1)
tf_image = tf.constant(image)
sheared_img = transform_ops.shear_y(image=tf_image, level=level, replace=color)
transform_matrix = transform.AffineTransform(
np.array([[1, 0, 0], [level.numpy(), 1, 0], [0, 0, 1]])
)
expected_img = transform.warp(
image, transform_matrix, order=0, cval=-1, preserve_range=True
)
mask = np.where(expected_img == -1)
expected_img[mask[0], mask[1], :] = color
test_utils.assert_allclose_according_to_type(sheared_img.numpy(), expected_img)
| apache-2.0 | -1,881,526,463,542,878,700 | 34.547414 | 88 | 0.56566 | false |
hryamzik/ansible | lib/ansible/module_utils/network/cnos/cnos_devicerules.py | 1 | 91032 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by
# Ansible still belong to the author of the module, and may assign their
# own license to the complete work.
#
# Copyright (C) 2017 Lenovo, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Contains device rules and methods
# Lenovo Networking
def getRuleString(deviceType, variableId):
retVal = variableId + ":"
if(deviceType == 'g8272_cnos'):
if variableId in g8272_cnos:
retVal = retVal + g8272_cnos[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'g8296_cnos'):
if variableId in g8296_cnos:
retVal = retVal + g8296_cnos[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'g8332_cnos'):
if variableId in g8332_cnos:
retVal = retVal + g8332_cnos[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE1072T'):
if variableId in NE1072T:
retVal = retVal + NE1072T[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE1032'):
if variableId in NE1032:
retVal = retVal + NE1032[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE1032T'):
if variableId in NE1032T:
retVal = retVal + NE1032T[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE10032'):
if variableId in NE10032:
retVal = retVal + NE10032[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
elif(deviceType == 'NE2572'):
if variableId in NE2572:
retVal = retVal + NE2572[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
else:
if variableId in default_cnos:
retVal = retVal + default_cnos[variableId]
else:
retVal = "The variable " + variableId + " is not supported"
return retVal
# EOM
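# The if/elif chain above repeats the same lookup for every device family.
# A minimal table-driven sketch of the same dispatch is shown below; the
# helper name is hypothetical and not part of this module's API. The
# mapping is built at call time, so the per-device dictionaries resolve
# correctly even though some are defined further down in this file.
def _getRuleStringTableDriven(deviceType, variableId):
    deviceRules = {
        'g8272_cnos': g8272_cnos,
        'g8296_cnos': g8296_cnos,
        'g8332_cnos': g8332_cnos,
        'NE1072T': NE1072T,
        'NE1032': NE1032,
        'NE1032T': NE1032T,
        'NE10032': NE10032,
        'NE2572': NE2572,
    }
    rules = deviceRules.get(deviceType, default_cnos)
    if variableId in rules:
        return variableId + ":" + rules[variableId]
    return "The variable " + variableId + " is not supported"
# Example: getRuleString('NE1032', 'vlan_id') and the sketch above both
# return 'vlan_id:INTEGER_VALUE:1-3999'.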
default_cnos = {
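    # Rule-value grammar used throughout these tables (as seen in the values
    # themselves): INTEGER_VALUE:<min>-<max> and NO_VALIDATION:<min>-<max>
    # for numbers, INTEGER_VALUE_RANGE for hyphenated ranges, TEXT_OPTIONS
    # for comma-separated keyword lists, TEXT for free text, FLOAT for
    # decimals, MATCH_TEXT_OR_EMPTY for an optional keyword, and
    # IPV4Address / IPV6Address / IPV4AddressWithMask for addresses.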
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,\
interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,\
trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,\
input,output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,\
vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
NE2572 = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-54',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-54',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
NE1032T = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
NE1032 = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
'vlan_access_map_name': 'TEXT: ',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
'bfd_ multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
NE1072T = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
    'vlan_access_map_name': 'TEXT:',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-54',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-54',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
    'bfd_multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
NE10032 = {
'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
    'vlan_access_map_name': 'TEXT:',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
    'bfd_multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:10000,100000,25000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
g8272_cnos = {'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
              'vlan_access_map_name': 'TEXT:',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-64',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-54',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-54',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
              'bfd_multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,input,\
output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,40000',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
g8296_cnos = {'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
              'vlan_access_map_name': 'TEXT:',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-128',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-96',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-96',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
              'bfd_multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,\
arp,dhcp,ospf,port,port-unreachable,redirects,router,\
unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,\
input,output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,40000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
g8332_cnos = {'vlan_id': 'INTEGER_VALUE:1-3999',
'vlan_id_range': 'INTEGER_VALUE_RANGE:1-3999',
'vlan_name': 'TEXT:',
'vlan_flood': 'TEXT_OPTIONS:ipv4,ipv6',
'vlan_state': 'TEXT_OPTIONS:active,suspend',
'vlan_last_member_query_interval': 'INTEGER_VALUE:1-25',
'vlan_querier': 'IPV4Address:',
'vlan_querier_timeout': 'INTEGER_VALUE:1-65535',
'vlan_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_query_max_response_time': 'INTEGER_VALUE:1-25',
'vlan_report_suppression': 'INTEGER_VALUE:1-25',
'vlan_robustness_variable': 'INTEGER_VALUE:1-7',
'vlan_startup_query_count': 'INTEGER_VALUE:1-10',
'vlan_startup_query_interval': 'INTEGER_VALUE:1-18000',
'vlan_snooping_version': 'INTEGER_VALUE:2-3',
              'vlan_access_map_name': 'TEXT:',
'vlan_ethernet_interface': 'TEXT:',
'vlan_portagg_number': 'INTEGER_VALUE:1-4096',
'vlan_accessmap_action': 'TEXT_OPTIONS:drop,forward,redirect',
'vlan_dot1q_tag': 'MATCH_TEXT_OR_EMPTY:egress-only',
'vlan_filter_name': 'TEXT:',
'vlag_auto_recovery': 'INTEGER_VALUE:240-3600',
'vlag_config_consistency': 'TEXT_OPTIONS:disable,strict',
'vlag_instance': 'INTEGER_VALUE:1-128',
'vlag_port_aggregation': 'INTEGER_VALUE:1-4096',
'vlag_priority': 'INTEGER_VALUE:0-65535',
'vlag_startup_delay': 'INTEGER_VALUE:0-3600',
'vlag_tier_id': 'INTEGER_VALUE:1-512',
'vlag_hlthchk_options': 'TEXT_OPTIONS:keepalive-attempts,\
keepalive-interval,peer-ip,retry-interval',
'vlag_keepalive_attempts': 'INTEGER_VALUE:1-24',
'vlag_keepalive_interval': 'INTEGER_VALUE:2-300',
'vlag_retry_interval': 'INTEGER_VALUE:1-300',
'vlag_peerip': 'IPV4Address:',
'vlag_peerip_vrf': 'TEXT_OPTIONS:default,management',
'bgp_as_number': 'NO_VALIDATION:1-4294967295',
'bgp_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_bgp_local_count': 'INTEGER_VALUE:2-64',
'cluster_id_as_ip': 'IPV4Address:',
'cluster_id_as_number': 'NO_VALIDATION:1-4294967295',
'confederation_identifier': 'INTEGER_VALUE:1-65535',
'condeferation_peers_as': 'INTEGER_VALUE:1-65535',
'stalepath_delay_value': 'INTEGER_VALUE:1-3600',
'maxas_limit_as': 'INTEGER_VALUE:1-2000',
'neighbor_ipaddress': 'IPV4Address:',
'neighbor_as': 'NO_VALIDATION:1-4294967295',
'router_id': 'IPV4Address:',
'bgp_keepalive_interval': 'INTEGER_VALUE:0-3600',
'bgp_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_aggregate_prefix': 'IPV4AddressWithMask:',
'addrfamily_routemap_name': 'TEXT:',
'reachability_half_life': 'INTEGER_VALUE:1-45',
'start_reuse_route_value': 'INTEGER_VALUE:1-20000',
'start_suppress_route_value': 'INTEGER_VALUE:1-20000',
'max_duration_to_suppress_route': 'INTEGER_VALUE:1-255',
'unreachability_halftime_for_penalty': 'INTEGER_VALUE:1-45',
'distance_external_AS': 'INTEGER_VALUE:1-255',
'distance_internal_AS': 'INTEGER_VALUE:1-255',
'distance_local_routes': 'INTEGER_VALUE:1-255',
'maxpath_option': 'TEXT_OPTIONS:ebgp,ibgp',
'maxpath_numbers': 'INTEGER_VALUE:2-32',
'network_ip_prefix_with_mask': 'IPV4AddressWithMask:',
'network_ip_prefix_value': 'IPV4Address:',
'network_ip_prefix_mask': 'IPV4Address:',
'nexthop_crtitical_delay': 'NO_VALIDATION:1-4294967295',
'nexthop_noncrtitical_delay': 'NO_VALIDATION:1-4294967295',
'addrfamily_redistribute_option': 'TEXT_OPTIONS:direct,ospf,\
static',
'bgp_neighbor_af_occurances': 'INTEGER_VALUE:1-10',
'bgp_neighbor_af_filtername': 'TEXT:',
'bgp_neighbor_af_maxprefix': 'INTEGER_VALUE:1-15870',
'bgp_neighbor_af_prefixname': 'TEXT:',
'bgp_neighbor_af_routemap': 'TEXT:',
'bgp_neighbor_address_family': 'TEXT_OPTIONS:ipv4,ipv6',
'bgp_neighbor_connection_retrytime': 'INTEGER_VALUE:1-65535',
'bgp_neighbor_description': 'TEXT:',
'bgp_neighbor_maxhopcount': 'INTEGER_VALUE:1-255',
'bgp_neighbor_local_as': 'NO_VALIDATION:1-4294967295',
'bgp_neighbor_maxpeers': 'INTEGER_VALUE:1-96',
'bgp_neighbor_password': 'TEXT:',
'bgp_neighbor_timers_Keepalive': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_timers_holdtime': 'INTEGER_VALUE:0-3600',
'bgp_neighbor_ttl_hops': 'INTEGER_VALUE:1-254',
'bgp_neighbor_update_options': 'TEXT_OPTIONS:ethernet,loopback,\
vlan',
'bgp_neighbor_update_ethernet': 'TEXT:',
'bgp_neighbor_update_loopback': 'INTEGER_VALUE:0-7',
'bgp_neighbor_update_vlan': 'INTEGER_VALUE:1-4094',
'bgp_neighbor_weight': 'INTEGER_VALUE:0-65535',
'ethernet_interface_value': 'INTEGER_VALUE:1-32',
'ethernet_interface_range': 'INTEGER_VALUE_RANGE:1-32',
'ethernet_interface_string': 'TEXT:',
'loopback_interface_value': 'INTEGER_VALUE:0-7',
'mgmt_interface_value': 'INTEGER_VALUE:0-0',
'vlan_interface_value': 'INTEGER_VALUE:1-4094',
'portchannel_interface_value': 'INTEGER_VALUE:1-4096',
'portchannel_interface_range': 'INTEGER_VALUE_RANGE:1-4096',
'portchannel_interface_string': 'TEXT:',
'aggregation_group_no': 'INTEGER_VALUE:1-4096',
'aggregation_group_mode': 'TEXT_OPTIONS:active,on,passive',
'bfd_options': 'TEXT_OPTIONS:authentication,echo,interval,ipv4,\
ipv6,neighbor',
'bfd_interval': 'INTEGER_VALUE:50-999',
'bfd_minrx': 'INTEGER_VALUE:50-999',
              'bfd_multiplier': 'INTEGER_VALUE:3-50',
'bfd_ipv4_options': 'TEXT_OPTIONS:authentication,echo,interval',
'bfd_auth_options': 'TEXT_OPTIONS:keyed-md5,keyed-sha1,\
meticulous-keyed-md5,meticulous-keyed-sha1,simple',
'bfd_key_options': 'TEXT_OPTIONS:key-chain,key-id',
'bfd_key_chain': 'TEXT:',
'bfd_key_id': 'INTEGER_VALUE:0-255',
'bfd_key_name': 'TEXT:',
'bfd_neighbor_ip': 'TEXT:',
'bfd_neighbor_options': 'TEXT_OPTIONS:admin-down,multihop,\
non-persistent',
'bfd_access_vlan': 'INTEGER_VALUE:1-3999',
'bfd_bridgeport_mode': 'TEXT_OPTIONS:access,dot1q-tunnel,trunk',
'trunk_options': 'TEXT_OPTIONS:allowed,native',
'trunk_vlanid': 'INTEGER_VALUE:1-3999',
'portCh_description': 'TEXT:',
'duplex_option': 'TEXT_OPTIONS:auto,full,half',
'flowcontrol_options': 'TEXT_OPTIONS:receive,send',
'portchannel_ip_options': 'TEXT_OPTIONS:access-group,address,arp,\
dhcp,ospf,port,port-unreachable,redirects,router,unreachables',
'accessgroup_name': 'TEXT:',
'portchannel_ipv4': 'IPV4Address:',
'portchannel_ipv4_mask': 'TEXT:',
'arp_ipaddress': 'IPV4Address:',
'arp_macaddress': 'TEXT:',
'arp_timeout_value': 'INTEGER_VALUE:60-28800',
'relay_ipaddress': 'IPV4Address:',
'ip_ospf_options': 'TEXT_OPTIONS:authentication,\
authentication-key,bfd,cost,database-filter,dead-interval,\
hello-interval,message-digest-key,mtu,mtu-ignore,network,\
passive-interface,priority,retransmit-interval,shutdown,\
transmit-delay',
'ospf_id_decimal_value': 'NO_VALIDATION:1-4294967295',
'ospf_id_ipaddres_value': 'IPV4Address:',
'lacp_options': 'TEXT_OPTIONS:port-priority,suspend-individual,\
timeout',
'port_priority': 'INTEGER_VALUE:1-65535',
'lldp_options': 'TEXT_OPTIONS:receive,tlv-select,transmit,\
trap-notification',
'lldp_tlv_options': 'TEXT_OPTIONS:link-aggregation,\
mac-phy-status,management-address,max-frame-size,\
port-description,port-protocol-vlan,port-vlan,power-mdi,\
protocol-identity,system-capabilities,system-description,\
system-name,vid-management,vlan-name',
'load_interval_delay': 'INTEGER_VALUE:30-300',
'load_interval_counter': 'INTEGER_VALUE:1-3',
'mac_accessgroup_name': 'TEXT:',
'mac_address': 'TEXT:',
'microburst_threshold': 'NO_VALIDATION:1-4294967295',
'mtu_value': 'INTEGER_VALUE:64-9216',
'service_instance': 'NO_VALIDATION:1-4294967295',
'service_policy_options': 'TEXT_OPTIONS:copp-system-policy,\
input,output,type',
'service_policy_name': 'TEXT:',
'spanning_tree_options': 'TEXT_OPTIONS:bpdufilter,bpduguard,\
cost,disable,enable,guard,link-type,mst,port,port-priority,vlan',
'spanning_tree_cost': 'NO_VALIDATION:1-200000000',
'spanning_tree_interfacerange': 'INTEGER_VALUE_RANGE:1-3999',
'spanning_tree_portpriority': 'TEXT_OPTIONS:0,32,64,96,128,160,\
192,224',
'portchannel_ipv6_neighbor_mac': 'TEXT:',
'portchannel_ipv6_neighbor_address': 'IPV6Address:',
'portchannel_ipv6_linklocal': 'IPV6Address:',
'portchannel_ipv6_dhcp_vlan': 'INTEGER_VALUE:1-4094',
'portchannel_ipv6_dhcp_ethernet': 'TEXT:',
'portchannel_ipv6_dhcp': 'IPV6Address:',
'portchannel_ipv6_address': 'IPV6Address:',
'portchannel_ipv6_options': 'TEXT_OPTIONS:address,dhcp,\
link-local,nd,neighbor',
'interface_speed': 'TEXT_OPTIONS:1000,10000,40000,50000,auto',
'stormcontrol_options': 'TEXT_OPTIONS:broadcast,multicast,\
unicast',
'stormcontrol_level': 'FLOAT:',
'portchannel_dot1q_tag': 'TEXT_OPTIONS:disable,enable,\
egress-only',
'vrrp_id': 'INTEGER_VALUE:1-255',
}
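# --- Illustrative sketch (not part of the original module) -------------------
# One plausible way a consumer could interpret the 'TYPE:ARGS' rule strings
# defined above. The helper below is a hypothetical example added for clarity
# only; the real validation logic lives elsewhere and may differ.
def _check_rule_value(rule, value):
    """Return True if the string `value` satisfies a 'TYPE:ARGS' rule."""
    kind, _, args = rule.partition(':')
    if kind == 'INTEGER_VALUE':
        low, high = args.split('-')
        return value.isdigit() and int(low) <= int(value) <= int(high)
    if kind == 'TEXT_OPTIONS':
        # strip() guards against the backslash-continued string literals
        # above, which embed the continuation lines' indentation whitespace.
        return value in [opt.strip() for opt in args.split(',')]
    if kind == 'TEXT':
        return bool(value)
    # NO_VALIDATION, INTEGER_VALUE_RANGE, IPV4Address, IPV6Address, FLOAT,
    # etc. would need handlers of their own.
    return True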
| gpl-3.0 | 5,255,834,514,839,444,000 | 51.650087 | 80 | 0.625945 | false |
izrik/wodehouse | macros/try_.py | 1 | 5004 | from wtypes.control import WSetHandlers, WEvalRequired
from wtypes.list import WList
from wtypes.magic_macro import WMagicMacro
from wtypes.symbol import WSymbol
class Try(WMagicMacro):
def call_magic_macro(self, exprs, scope):
# In python, the try statement has two forms:
# try/clause/except+/else?/finally?
# try/clause/finally
# In the first form one or more "except"s are required but the others
# are optional. In the second form, there are no "except"s or "else",
# but "finally" is required.
#
# We will keep the two forms, with the differences that only one
# except will be allowed for now (as we can't yet distinguish between
# different exception classes), and that there will be no else clause.
#
# An except part is of the form:
# (except <expr>)
#
# A finally part is of the form:
# (finally <expr>)
#
# Fuller example:
# (try
# (call_some_function arg1 arg2 arg3)
# (except
# (print "Something bad happened!"))
# (finally
# (print "All done!")))
#
# And another:
#
# (try
# (call_some_function arg1 arg2 arg3)
# (finally
# (print "Clean up!")))
#
# And another:
#
# (try
# (call_some_function arg1 arg2 arg3)
# (except
# (print "Something bad happened!")))
#
# In an 'except' clause, you can also specify that the exception that
# is currently being handled be stored into a temp variable for the
# duration of the handler, like so:
#
# (try
# (call_some_function arg1 arg2 arg3)
# (except as e
# (print (format "Something bad happened: {}" e))))
#
# This acts like an implicit 'let', so the variable will obscure any
# other values with the same name in the current scope, and the
# exception will not be available after the handler has completed.
#
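        # Informal grammar summary, derived from the checks below (the
        # bracketed names are descriptive placeholders, not language symbols):
        #   try-form     := (try <expr> <clause>+)
        #   except-part  := (except [as <varname>] <expr>)   ; at most one
        #   finally-part := (finally <expr>)                 ; at most one,
        #                                                    ; always last
        #   clause       := except-part | finally-part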
s_exc = WSymbol.get('except')
s_fin = WSymbol.get('finally')
# check args
if len(exprs) < 2:
raise Exception(f"try requires at least two clauses. "
f"Got {len(exprs)} instead.")
for expr in exprs[1:]:
if not isinstance(expr, WList) or \
not isinstance(expr[0], WSymbol) or \
expr[0] not in [s_exc, s_fin]:
raise Exception(f'Clause should be a list with "except" or '
f'"finally" in the head position. '
f'Got "{expr}" ({type(expr)}) instead.')
if expr[0] is s_exc:
msg = f'An except clause must be of the form "(except ' \
f'[as <varname>] <expr>)", with exactly one ' \
                      f'expression to be evaluated, and may have an ' \
f'optional "as <varname>" portion. ' \
f'Got {expr[1:]} instead.'
if len(expr) != 2 and len(expr) != 4:
raise Exception(msg)
if len(expr) == 4:
                    if expr[1] != WSymbol.get('as') or \
not isinstance(expr[2], WSymbol):
raise Exception(msg)
if expr[0] is s_fin:
if len(expr) != 2:
raise Exception('A finally clause must have exactly one '
'expression to be evaluated.')
code_clause = exprs[0]
except_clause = None
except_var_name = None
finally_clause = None
for expr in exprs[1:]:
head = expr.head
if head == s_exc:
if except_clause is not None:
                raise Exception('Only one except clause is allowed.')
if finally_clause is not None:
raise Exception('An except clause must appear before the '
'finally clause')
if len(expr) > 2:
except_var_name = expr[2]
except_clause = expr[-1]
elif head == s_fin:
if finally_clause is not None:
raise Exception('Only one finally clause is allowed.')
finally_clause = expr[1]
else:
raise Exception(f'Invalid clause: {head}')
def run_code_clause():
return WEvalRequired(code_clause, callback=return_code_retval)
def return_code_retval(rv):
return rv
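        # Hand control back to the interpreter. Judging by the names of the
        # control objects, it is expected to install the handlers, invoke
        # `run_code_clause` (which requests evaluation of the guarded
        # expression), and route the result or any raised exception through
        # the handlers configured here.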
return WSetHandlers(exception_handler=except_clause,
exception_var_name=except_var_name,
finally_handler=finally_clause,
callback=run_code_clause)
| gpl-2.0 | 6,567,071,081,038,352,000 | 39.354839 | 78 | 0.502198 | false |
Abdoctor/behave | behave/formatter/plain.py | 1 | 4689 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from behave.formatter.base import Formatter
from behave.model_describe import ModelPrinter
from behave.textutil import make_indentation
# -----------------------------------------------------------------------------
# CLASS: PlainFormatter
# -----------------------------------------------------------------------------
class PlainFormatter(Formatter):
"""
Provides a simple plain formatter without coloring/formatting.
    The formatter now also displays:
    * multi-line text (doc-strings)
    * tables
    * tags (if enabled)
"""
name = "plain"
description = "Very basic formatter with maximum compatibility"
SHOW_MULTI_LINE = True
SHOW_TAGS = False
SHOW_ALIGNED_KEYWORDS = False
DEFAULT_INDENT_SIZE = 2
def __init__(self, stream_opener, config, **kwargs):
super(PlainFormatter, self).__init__(stream_opener, config)
self.steps = []
self.show_timings = config.show_timings
self.show_multiline = config.show_multiline and self.SHOW_MULTI_LINE
self.show_aligned_keywords = self.SHOW_ALIGNED_KEYWORDS
self.show_tags = self.SHOW_TAGS
self.indent_size = self.DEFAULT_INDENT_SIZE
# -- ENSURE: Output stream is open.
self.stream = self.open()
self.printer = ModelPrinter(self.stream)
# -- LAZY-EVALUATE:
self._multiline_indentation = None
@property
def multiline_indentation(self):
if self._multiline_indentation is None:
offset = 0
if self.show_aligned_keywords:
offset = 2
indentation = make_indentation(3 * self.indent_size + offset)
self._multiline_indentation = indentation
return self._multiline_indentation
def reset_steps(self):
self.steps = []
def write_tags(self, tags, indent=None):
if tags and self.show_tags:
indent = indent or ""
text = " @".join(tags)
self.stream.write(u"%s@%s\n" % (indent, text))
# -- IMPLEMENT-INTERFACE FOR: Formatter
def feature(self, feature):
self.reset_steps()
self.write_tags(feature.tags)
self.stream.write(u"%s: %s\n" % (feature.keyword, feature.name))
def background(self, background):
self.reset_steps()
indent = make_indentation(self.indent_size)
text = u"%s%s: %s\n" % (indent, background.keyword, background.name)
self.stream.write(text)
def scenario(self, scenario):
self.reset_steps()
self.stream.write(u"\n")
indent = make_indentation(self.indent_size)
text = u"%s%s: %s\n" % (indent, scenario.keyword, scenario.name)
self.write_tags(scenario.tags, indent)
self.stream.write(text)
def step(self, step):
self.steps.append(step)
def result(self, result):
"""
Process the result of a step (after step execution).
        :param result: The result of the step that was just executed.
"""
step = self.steps.pop(0)
indent = make_indentation(2 * self.indent_size)
if self.show_aligned_keywords:
# -- RIGHT-ALIGN KEYWORDS (max. keyword width: 6):
text = u"%s%6s %s ... " % (indent, step.keyword, step.name)
else:
text = u"%s%s %s ... " % (indent, step.keyword, step.name)
self.stream.write(text)
status_text = result.status.name
if self.show_timings:
status_text += " in %0.3fs" % step.duration
if result.error_message:
self.stream.write(u"%s\n%s\n" % (status_text, result.error_message))
else:
self.stream.write(u"%s\n" % status_text)
if self.show_multiline:
if step.text:
self.doc_string(step.text)
if step.table:
self.table(step.table)
def eof(self):
self.stream.write("\n")
# -- MORE: Formatter helpers
def doc_string(self, doc_string):
self.printer.print_docstring(doc_string, self.multiline_indentation)
def table(self, table):
self.printer.print_table(table, self.multiline_indentation)
# -----------------------------------------------------------------------------
# CLASS: Plain0Formatter
# -----------------------------------------------------------------------------
class Plain0Formatter(PlainFormatter):
"""
    Similar to the old plain formatter, without support for:
* multi-line text
* tables
* tags
"""
name = "plain0"
description = "Very basic formatter with maximum compatibility"
SHOW_MULTI_LINE = False
SHOW_TAGS = False
SHOW_ALIGNED_KEYWORDS = False
| bsd-2-clause | -8,901,676,309,910,815,000 | 32.255319 | 80 | 0.565152 | false |
WeskerYuan/flydan | sitl.py | 1 | 12399 | #! /usr/bin/python
"""
Software-in-the-loop simulation script for the multi quadcopter flocking control.
This is the main script for the multi quadcopter flocking control (SITL).
The script runs under the dronekit-sitl environment.
A high-level XBee module should be connected for intercommunication between
the drones and the ground control station if the hardware ports are
specified. Otherwise, a ZeroMQ publisher-subscriber network is set up to
simulate the communication.
The XBee module runs in API2 (escaped character) mode. At the time of
writing, an XBee Pro S1 module is used (with the DigiMesh firmware). See the
official Digi site and the datasheets for more details. Simulated XBee
modules use the same interface as the real ones.
The dronekit API package supports Python 2.7 for now. Ubuntu is the
preferred onboard Linux OS, as it uses `apt` to install distributed
packages, which makes setup easy and convenient.
See reference [1] for more details about the algorithm.
Reference:
    Digi XBee: https://docs.digi.com/display/WirelessConnectivityKit/XBee+API+mode
python-xbee: https://github.com/nioinnovation/python-xbee
DKPY-API Reference: http://python.dronekit.io/automodule.html
Dronekit-SITL: http://python.dronekit.io/develop/sitl_setup.html?highlight=sitl
[1] Q. Yuan, J. Zhan and X. Li, Outdoor flocking of quadcopter drones with
decentralized model predictive control, ISA Transactions, 2017.
Environment:
Computer and OS: Raspberry Model 3B with Ubuntu MATE 16.04LTS.
    Wireless module: XBee Pro S1 with DigiMesh firmware.
Python packages: dronekit, dronekit-sitl, xbee, numpy
Attributes:
start_loc(dict): starting location coordinates related to agent_id.
comm_port_list(dict): SITL TCP ports related to agent_id.
Copyright:
Copyright 2017 Quan Yuan, Adaptive Networks and Control Lab,
Research Center of Smart Networks and Systems,
School of Information Science and Engineering,
Fudan University.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
import sys
import time
import math
import serial
import logging
import argparse
import threading
from dronekit_sitl import SITL
from src import nav
from src import mas
from src import comm
from src import util
from src import shared
def _add_listeners(vehicle):
"""
Add listeners to monitor vehicle status.
Args:
vehicle(dronekit.Vehicle): the copter to be controlled.
"""
@vehicle.on_attribute('mode')
    def mode_listener(self, name, msg):
util.log_info("Mode switched to %s" % msg.name)
if msg.name != shared.status['manual_mode']: # manual override
if msg.name == 'RTL' or msg.name == 'LAND':
util.log_warning("External %s detected. Abort." % msg.name)
shared.status['abort'] = True
@vehicle.on_attribute('gps_0')
    def gps_listener(self, name, msg): # monitor satellites
if not shared.status['thread_flag'] & shared.NSATS_TOO_LOW:
if msg.satellites_visible < 6:
util.log_warning("Satellites dropped below 5!")
shared.status['thread_flag'] |= shared.NSATS_TOO_LOW
elif msg.satellites_visible >= 10:
util.log_info("Satellites recovered to %d." % msg.satellites_visible)
shared.status['thread_flag'] &= ~shared.NSATS_TOO_LOW
@vehicle.on_message('SYSTEM_TIME')
    def time_listener(self, name, msg): # log timestamp
format = '%Y-%m-%d %H:%M:%S'
val = time.localtime(msg.time_unix_usec/1000000)
shared.timestamp = time.strftime(format, val)
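    # dronekit calls attribute listeners as (vehicle, attribute_name,
    # new_value); the MAVLink message listener instead receives the raw
    # SYSTEM_TIME message. The `msg` parameter name covers both cases here.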
def _parse_arguments():
"""
Parse the arguments to the main script and validate the inputs.
Returns:
        argparse.Namespace: the parsed and validated arguments.
"""
parser = argparse.ArgumentParser(
fromfile_prefix_chars='@',
formatter_class = argparse.ArgumentDefaultsHelpFormatter,
description = 'Arguments for the SITL simulation.'
)
parser.add_argument('-id', type=str, default='FF', metavar='AgentID', required=True,
help="AGENT_ID, must be a 2-digit integer.")
parser.add_argument('-alt', type=float, default=15.0, metavar='',
help='Takeoff altitude, within [10.0, 100.0] (m).')
parser.add_argument('-xbee', type=str, default=None, metavar='',
help="XBee module's device path. If not provided, use ZeroMQ.")
parser.add_argument('-pix', type=str, default='fw/ac3.5.2_port5760', metavar='',
help="Pixhawk's device path. Can be SITL firmware.")
parser.add_argument('-algorithm', '-a', type=str, default='MPC', metavar='',
choices=['Vicsek','MPC'],
help="Algorithm used for main script.")
parser.add_argument('-character', '-c', type=str, default='follower', metavar='',
choices=['square','passive','follower'],
                        help="Agent role: 'square' and 'passive' are leader types, 'follower' is a follower.")
parser.add_argument('-n', type=int, default=5, metavar='',
help="Total agent count.")
parser.add_argument('-level', '-l', type=str, default='info', metavar='',
choices=['warning','debug','info'],
help="Logging level: ['warning','debug','info']")
args = parser.parse_args()
# get correct parameters
if args.alt < 10.0 or args.alt > 100.0:
        raise Exception('-alt should be within [10.0, 100.0]')
if not args.id.isdigit() or len(args.id) != 2:
        raise Exception('-id should be a 2-digit integer')
return args
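# Illustrative invocation (the script name and the values are assumptions,
# matching the argument definitions above):
#     python sitl_main.py -id 01 -alt 15.0 -a MPC -c follower -n 5 -l info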
def _choose_algorithm(vehicle, xbee, neighbors):
"""
Choose which algorithm thread to be instantiated.
Args:
vehicle(dronekit.Vehicle): the copter to be controlled.
xbee(xbee.Zigbee): the XBee communication interface.
neighbors(dict): the dictionary containing neighbors data.
Returns:
        The mas thread instance (e.g. ``mas.Vicsek`` or ``mas.Decentralized``)
        selected by the agent character and algorithm settings.
"""
if shared.AGENT_CHARACTER == 'square':
return mas.SquareRoute(vehicle, xbee)
elif shared.AGENT_CHARACTER == 'passive':
return mas.PassiveLeader(vehicle, xbee)
elif shared.CURRENT_ALGORITHM == 'Vicsek':
return mas.Vicsek(vehicle, xbee, neighbors)
elif shared.CURRENT_ALGORITHM == 'MPC':
return mas.Decentralized(vehicle, xbee, neighbors)
# starting location GNSS coordinates. Modify accordingly.
# Format: latitude,longitude,MSL altitude, heading
# 'FFF' is reserved for agents without an assigned ID. Not available when using pyzmq.
start_loc = {
'A01': '31.2991103,121.4953190,9,340',
'A02': '31.2989222,121.4954363,9,340',
'A03': '31.2988302,121.4953633,9,340',
'A04': '31.2988857,121.4954170,9,340',
'A05': '31.2989833,121.4955480,9,340',
'FFF': '31.3012010,121.4981920,9,340'
}
# port list for SITL communications
comm_port_list = {
'A01': 5789,
'A02': 6789,
'A03': 7789,
'A04': 8789,
'A05': 9789,
'GCS': 1789
}
def main():
"""
The Main function of this script.
"""
args = _parse_arguments()
util.log_init("sitl_A%s_%s.txt" % (args.id, util.get_latest_log("latest_sitl.txt")), util.log_level[args.level])
shared.AGENT_ID = 'A%s' % args.id
shared.AGENT_COUNT = args.n
shared.CURRENT_ALGORITHM = args.algorithm
shared.AGENT_CHARACTER = args.character
shared.des_alt = args.alt
util.log_info("AGENT_ID = %s" % shared.AGENT_ID)
util.log_info("Algorithm: %s" % shared.CURRENT_ALGORITHM)
util.log_info("Agent type: %s" % shared.AGENT_CHARACTER)
print "Start simulator (SITL)"
sitl = SITL(args.pix) # initialize SITL with firmware path
if shared.AGENT_ID in start_loc:
sitl_args = ['--home=%s' % start_loc[shared.AGENT_ID]]
else:
sitl_args = ['--home=%s' % start_loc['FFF']]
# Pre-recorded coordinates.
#sitl_args = ['-I0', '--model', 'quad', '--home=31.301201,121.498192,9,353']
sitl.launch(sitl_args, await_ready=True, restart=True)
# Connect to the vehicle. (Spawn an instance of Vehicle named "vehicle")
# connection port is coded in the file name of the firmware like "ac3.4.5_port5760"
# use regular expression to search the string and extract port number
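    # For example, with the default '-pix' value 'fw/ac3.5.2_port5760' the two
    # searches below yield 'port5760' and then '5760'.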
port = re.search(r'port\d{4}', args.pix)
port = re.search(r'\d{4}', port.group()).group()
print "Connecting to copter on: TCP: 127.0.0.1:%s" % port
copter = nav.connect('tcp:127.0.0.1:%s' % port, wait_ready=True, rate=20)
util.log_info("Copter connected. Firmware: %s" % copter.version)
if not args.xbee: # simulate XBee using ZeroMQ
[pub, sub] = comm.zmq_init(comm_port_list[shared.AGENT_ID], comm_port_list)
subscriber_thread = comm.Subscriber(shared.AGENT_ID, sub)
subscriber_thread.start()
xbee = pub # make xbee the publisher
        util.log_info("ZeroMQ initialized.")
else: # use actual xbee ports
ser = serial.Serial(args.xbee, 57600)
xbee = comm.xbee_init(ser)
        util.log_info("XBee initialized.")
info = "IFO,%s connected with firmware %s" % (shared.AGENT_ID, copter.version)
comm.xbee_broadcast(xbee, info)
_add_listeners(copter)
takeoff_thread = nav.Takeoff(copter, xbee, shared.des_alt, 3)
purge_thread = comm.Purge(shared.neighbors)
broadcast_thread = comm.Broadcast(shared.AGENT_ID, copter, xbee)
flocking_thread = _choose_algorithm(copter, xbee, shared.neighbors)
takeoff_thread.start()
takeoff_thread.join() # wait until takeoff procedure completed
if shared.status['airborne']: # only execute the threads when airborne
util.log_info("Copter is airborne, starting threads.")
broadcast_thread.start()
purge_thread.start()
flocking_thread.start()
# main loop
while True:
try: time.sleep(.2)
except KeyboardInterrupt: break
if shared.status['airborne']:
# echo exiting status
if shared.status['exiting']:
info = "IFO,%s %s-ing." % (shared.AGENT_ID,shared.status['command'])
comm.xbee_broadcast(xbee, info)
util.log_info(info)
# if an rtl or land command is received, kill flocking and set the `exiting` flag
elif shared.status['command'] == 'RTL' or shared.status['command'] == 'LAND':
shared.status['thread_flag'] |= shared.FLOCKING_FLAG
nav.set_mode(copter, shared.status['command'])
shared.status['exiting'] = True
if not flocking_thread.is_alive(): # break the loop if finished
break
nav.wait_for_disarm(copter) # wait for disarm
comm.xbee_broadcast(xbee, 'IFO,%s terminated.' % shared.AGENT_ID)
# clean up
purge_thread.stop()
while purge_thread.is_alive():
util.log_info('Waiting for purge to shutdown')
purge_thread.join(3)
util.log_info('Purge killed.')
broadcast_thread.stop()
while broadcast_thread.is_alive():
util.log_info('Waiting for broadcast to shutdown')
broadcast_thread.join(3)
util.log_info('Broadcast killed.')
copter.close()
util.log_info("Copter shutdown.")
if args.xbee:
xbee.halt()
ser.close()
util.log_info("Xbee and serial closed.")
else:
subscriber_thread.stop()
while subscriber_thread.is_alive():
util.log_info('Waiting for Subscriber to shutdown')
subscriber_thread.join(3)
util.log_info('Subscriber killed.')
sitl.stop()
util.log_info("SITL shutdown.")
if __name__ == '__main__':
main() | apache-2.0 | 6,329,591,831,560,588,000 | 37.153846 | 116 | 0.642874 | false |
prajdabre/knmt | nmt_chainer/bleu_computer.py | 1 | 7302 | #!/usr/bin/env python
"""bleu_computer.py: compute BLEU score"""
__author__ = "Fabien Cromieres"
__license__ = "undecided"
__version__ = "1.0"
__email__ = "fabien.cromieres@gmail.com"
__status__ = "Development"
import argparse, os
from collections import defaultdict
import math, codecs
from itertools import izip
class BleuComputer(object):
def __init__(self):
self.ngrams_corrects = {1: 0, 2: 0, 3: 0, 4: 0}
self.ngrams_total = {1: 0, 2: 0, 3: 0, 4: 0}
self.total_length = 0
self.ref_length = 0
def copy(self):
res = BleuComputer()
res.ngrams_corrects = self.ngrams_corrects.copy()
res.ngrams_total = self.ngrams_total.copy()
res.total_length = self.total_length
res.ref_length = self.ref_length
return res
def __repr__(self):
res = []
res.append("bleu:%f%% "%(self.bleu() * 100))
for n in xrange(1, 5):
res.append("%i/%i[%f%%]"%(self.ngrams_corrects[n], self.ngrams_total[n], 100.0 *self.ngrams_corrects[n] / self.ngrams_total[n]))
res.append("size of cand/ref: %i/%i[%f]"%(self.total_length, self.ref_length, float(self.total_length) / self.ref_length))
return " ".join(res)
__str__ = __repr__
def bleu(self):
if min(self.ngrams_corrects.values()) <= 0:
return 0
assert min(self.ngrams_total.values()) >= 0
assert min(self.ngrams_total.values()) >= min(self.ngrams_corrects.values())
log_brevity_penalty = min(0, 1.0 - float(self.ref_length) / self.total_length)
log_average_precision = 0.25 *(
sum(math.log(v) for v in self.ngrams_corrects.values()) -
sum(math.log(v) for v in self.ngrams_total.values())
)
res = math.exp(log_brevity_penalty + log_average_precision)
return res
def bleu_plus_alpha(self, alpha = 1.0):
log_brevity_penalty = min(0, 1.0 - float(self.ref_length) / self.total_length)
log_average_precision = 0.25 *(
sum(math.log(v + alpha) for v in self.ngrams_corrects.values()) -
sum(math.log(v + alpha) for v in self.ngrams_total.values())
)
res = math.exp(log_brevity_penalty + log_average_precision)
return res
def update(self, reference, translation):
self.ref_length += len(reference)
self.total_length += len(translation)
for n in xrange(1, 5):
reference_ngrams = defaultdict(int)
translation_ngrams = defaultdict(int)
for start in xrange(0, len(reference) - n + 1):
ngram = tuple(reference[start : start + n])
reference_ngrams[ngram] += 1
for start in xrange(0, len(translation) - n + 1):
ngram = tuple(translation[start : start + n])
# print ngram
translation_ngrams[ngram] += 1
for ngram, translation_freq in translation_ngrams.iteritems():
reference_freq = reference_ngrams[ngram]
self.ngrams_total[n] += translation_freq
if ngram in reference_ngrams:
if reference_freq >= translation_freq:
self.ngrams_corrects[n] += translation_freq
else:
self.ngrams_corrects[n] += reference_freq
def update_plus(self, diff):
ngrams_corrects, ngrams_total, t_len, r_len = diff
for n in xrange(1, 5):
self.ngrams_corrects[n] += ngrams_corrects[n]
self.ngrams_total[n] += ngrams_total[n]
self.ref_length += r_len
self.total_length += t_len
def update_minus(self, diff):
ngrams_corrects, ngrams_total, t_len, r_len = diff
for n in xrange(1, 5):
self.ngrams_corrects[n] -= ngrams_corrects[n]
self.ngrams_total[n] -= ngrams_total[n]
assert self.ngrams_corrects[n] >= 0
assert self.ngrams_total[n] >= 0
self.ref_length -= r_len
self.total_length -= t_len
assert self.total_length >= 0
assert self.ref_length >= 0
@staticmethod
def compute_ngram_info(sentence):
infos = defaultdict(int)
for n in xrange(1, 5):
for start in xrange(0, len(sentence) - n + 1):
ngram = tuple(sentence[start : start + n])
infos[ngram] += 1
return infos, len(sentence)
@staticmethod
def compute_update_diff_from__infos(reference_info, translation_info):
ngrams_corrects = {1: 0, 2: 0, 3: 0, 4: 0}
ngrams_total = {1: 0, 2: 0, 3: 0, 4: 0}
reference_ngrams, ref_len = reference_info
translation_ngrams, t_len = translation_info
for ngram, translation_freq in translation_ngrams.iteritems():
n = len(ngram)
reference_freq = reference_ngrams[ngram]
ngrams_total[n] += translation_freq
if ngram in reference_ngrams:
if reference_freq >= translation_freq:
ngrams_corrects[n] += translation_freq
else:
ngrams_corrects[n] += reference_freq
return ngrams_corrects, ngrams_total, t_len, ref_len
@staticmethod
def compute_update_diff(reference, translation):
ngrams_corrects = {1: 0, 2: 0, 3: 0, 4: 0}
ngrams_total = {1: 0, 2: 0, 3: 0, 4: 0}
for n in xrange(1, 5):
reference_ngrams = defaultdict(int)
translation_ngrams = defaultdict(int)
for start in xrange(0, len(reference) - n + 1):
ngram = tuple(reference[start : start + n])
reference_ngrams[ngram] += 1
for start in xrange(0, len(translation) - n + 1):
ngram = tuple(translation[start : start + n])
# print ngram
translation_ngrams[ngram] += 1
for ngram, translation_freq in translation_ngrams.iteritems():
reference_freq = reference_ngrams[ngram]
ngrams_total[n] += translation_freq
if ngram in reference_ngrams:
if reference_freq >= translation_freq:
ngrams_corrects[n] += translation_freq
else:
ngrams_corrects[n] += reference_freq
return ngrams_corrects, ngrams_total, len(translation), len(reference)
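# Illustrative usage sketch (sentences are whitespace-tokenized token lists):
#     bc = BleuComputer()
#     bc.update("the cat sat on the mat".split(), "the cat sat on a mat".split())
#     print bc.bleu_plus_alpha(1.0)  # smoothed variant, useful for single segments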
def get_bc_from_files(ref_fn, trans_fn):
ref_file = codecs.open(ref_fn, "r", encoding = "utf8")
trans_file = codecs.open(trans_fn, "r", encoding = "utf8")
bc = BleuComputer()
for line_ref, line_trans in izip(ref_file, trans_file):
r = line_ref.strip().split(" ")
t = line_trans.strip().split(" ")
bc.update(r, t)
return bc
def command_line():
parser = argparse.ArgumentParser(description = "Compute BLEU score")
parser.add_argument("ref")
parser.add_argument("translations")
args = parser.parse_args()
bc = get_bc_from_files(args.ref, args.translations)
print bc
if __name__ == "__main__":
command_line()
| gpl-3.0 | 1,328,134,983,708,614,400 | 38.901639 | 140 | 0.550671 | false |
provideyourown/SiteMonitoring | memoryusage.py | 1 | 1362 | #!/usr/bin/env python
"""
Display the system memory usage of a remote server (over ssh), or of this
computer when 'local' or 'localhost' is given.
Usage:
./memoryusage.py MYSERVER
"""
import argparse
import subprocess
def getMemoryUsage(server):
"""
    Returns memory usage as a (used_mb, total_mb, swap_used_fraction) tuple;
    the swap fraction is a value from the interval [0.0, 1.0]
"""
if server in ['local', 'localhost']:
result = subprocess.check_output('free -m', shell=True)
else:
result = subprocess.check_output('ssh %s "free -m"' % server, shell=True)
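    # Assumed `free -m` layout (procps versions that still print the
    # '-/+ buffers/cache' row, which the lines[2]/lines[3] indexes below rely on):
    #              total       used       free     shared    buffers     cached
    # Mem:          7862       5001       2860        190        320       2350
    # -/+ buffers/cache:       2330       5531
    # Swap:         8191          0       8191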
lines = result.split('\n')
toks = lines[2].split() # split along whitespace
used = int(toks[2])
free = int(toks[3])
total = used + free
toks = lines[3].split()
swap = float(toks[2]) / float(toks[1]) if int(toks[1]) else 0
return used, total, swap
if __name__ == '__main__': # allow funcs above to be imported as a module
parser = argparse.ArgumentParser(description='Get memory usage for a server/computer.')
parser.add_argument("server", help='Enter server name as defined in ~/.ssh/config or user@ip. NB: public key should be uploaded to server. For local computer use either local or localhost')
args = parser.parse_args()
used, total, swap = getMemoryUsage(args.server)
print "Memory usage: {:.2f}% of {}Mb (swap: {:.2f}%)".format(100.0*used/total, total, swap*100)
exit()
| gpl-3.0 | 2,848,168,859,462,388,700 | 31.428571 | 193 | 0.654185 | false |
chethana-tly/web.py-IIITMKWebportal | model.py | 1 | 1344 | import web, datetime,sys
try:
db=web.database(dbn="sqlite",db="IIITMK.db")
except:
print "Could not connect to database"
sys.exit()
def getUsers():
return db.select('IIITMKLogin', order='id DESC')
def get_posts():
return db.select('OpinionPoll', order='id DESC')
def get_post(id):
try:
return db.select('OpinionPoll', where='id=$id', vars=locals())[0]
except IndexError:
return None
def new_post(title, text):
db.insert('OpinionPoll', topic=title, content=text, posted_on=datetime.datetime.utcnow())
def del_post(id):
db.delete('OpinionPoll', where="id=$id", vars=locals())
def update_post(id, title, text):
db.update('OpinionPoll', where="id=$id", vars=locals(),topic=title, content=text)
def getAdminDetails(id):
try:
        return db.select('IIITMKLogin', what='username,password', where='account_type=$id', vars=locals())
except IndexError:
return None
def get_UserDetails(username):
    return db.select('IIITMKLogin', where='username=$username', vars=locals())
def get_Requests():
return db.select('UserDetails',vars=locals(),where='valid=0')
def approveUser(id):
try:
db.update('UserDetails',where="id=$id",vars=locals(),valid=1)
except IndexError:
return None
def rejectUser(id):
try:
db.update('UserDetails',where="id=$id",vars=locals(),valid=0)
except IndexError:
return None
| gpl-2.0 | -6,503,835,316,433,896,000 | 18.2 | 93 | 0.684524 | false |
jchampio/apache-websocket | test/present.py | 1 | 3920 | #! /usr/bin/env python
#
# Presents the results of an Autobahn TestSuite run in TAP format.
#
# Copyright 2015 Jacob Champion
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
from distutils.version import StrictVersion
import json
import os.path
import sys
import textwrap
import yamlish
def filter_report(report):
"""Filters a test report dict down to only the interesting keys."""
INTERESTING_KEYS = [
'behavior',
'behaviorClose',
'expected',
'received',
'expectedClose',
'remoteCloseCode'
]
return { key: report[key] for key in INTERESTING_KEYS }
def prepare_description(report):
"""Constructs a description from a test report."""
raw = report['description']
# Wrap to at most 80 characters.
wrapped = textwrap.wrap(raw, 80)
description = wrapped[0]
if len(wrapped) > 1:
# If the text is longer than one line, add an ellipsis.
description += '...'
return description
#
# MAIN
#
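# Sample of the TAP stream this script emits (test ids and descriptions are
# illustrative):
#
#   TAP version 13
#   1..2
#   ok 1 - [1.1.1] Send text message with payload 0
#   not ok 2 - [1.2.1] Send binary message...
#     behavior: FAILED
#   # Autobahn|TestSuite FAILED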
# Read the index.
results_dir = 'test-results'
with open(os.path.join(results_dir, 'index.json'), 'r') as index_file:
index = json.load(index_file)['AutobahnPython']
# Sort the tests by numeric ID so we print them in a sane order.
test_ids = index.keys()
test_ids.sort(key=StrictVersion)
# Print the TAP header.
print('TAP version 13')
print('1..{0!s}'.format(len(test_ids)))
count = 0
skipped_count = 0
failed_count = 0
for test_id in test_ids:
count += 1
passed = True
skipped = False
report = None
result = index[test_id]
# Try to get additional information from this test's report file.
try:
path = os.path.join(results_dir, result['reportfile'])
with open(path, 'r') as f:
report = json.load(f)
description = prepare_description(report)
except Exception as e:
description = '[could not load report file: {0!s}]'.format(e)
test_result = result['behavior']
close_result = result['behaviorClose']
# Interpret the result for this test.
if test_result != 'OK' and test_result != 'INFORMATIONAL':
if test_result == 'UNIMPLEMENTED':
skipped = True
else:
passed = False
elif close_result != 'OK' and close_result != 'INFORMATIONAL':
passed = False
# Print the TAP result.
print(u'{0} {1} - [{2}] {3}{4}'.format('ok' if passed else 'not ok',
count,
test_id,
description,
' # SKIP unimplemented' if skipped
else ''))
# Print a YAMLish diagnostic for failed tests.
if report and not passed:
output = filter_report(report)
diagnostic = yamlish.dumps(output)
for line in diagnostic.splitlines():
print(' ' + line)
if not passed:
failed_count += 1
if skipped:
skipped_count += 1
# Print a final result.
print('# Autobahn|TestSuite {0}'.format('PASSED' if not failed_count else 'FAILED'))
print('# total {0}'.format(count))
print('# passed {0}'.format(count - failed_count - skipped_count))
print('# skipped {0}'.format(skipped_count))
print('# failed {0}'.format(failed_count))
exit(0 if not failed_count else 1)
| apache-2.0 | -2,418,566,364,146,643,000 | 27.823529 | 84 | 0.616071 | false |
mhils/mitmproxy | mitmproxy/proxy2/layers/http/_http1.py | 1 | 17108 | import abc
from typing import Union, Optional, Callable, Type
import h11
from h11._readers import ChunkedReader, ContentLengthReader, Http10Reader
from h11._receivebuffer import ReceiveBuffer
from mitmproxy import exceptions, http
from mitmproxy.net import http as net_http
from mitmproxy.net.http import http1, status_codes
from mitmproxy.net.http.http1 import read_sansio as http1_sansio
from mitmproxy.proxy2 import commands, events, layer
from mitmproxy.proxy2.context import Connection, ConnectionState, Context
from mitmproxy.proxy2.layers.http._base import ReceiveHttp, StreamId
from mitmproxy.proxy2.utils import expect
from mitmproxy.utils import human
from ._base import HttpConnection
from ._events import HttpEvent, RequestData, RequestEndOfMessage, RequestHeaders, RequestProtocolError, ResponseData, \
ResponseEndOfMessage, ResponseHeaders, ResponseProtocolError
TBodyReader = Union[ChunkedReader, Http10Reader, ContentLengthReader]
class Http1Connection(HttpConnection, metaclass=abc.ABCMeta):
stream_id: Optional[StreamId] = None
request: Optional[http.HTTPRequest] = None
response: Optional[http.HTTPResponse] = None
request_done: bool = False
response_done: bool = False
# this is a bit of a hack to make both mypy and PyCharm happy.
state: Union[Callable[[events.Event], layer.CommandGenerator[None]], Callable]
body_reader: TBodyReader
buf: ReceiveBuffer
ReceiveProtocolError: Type[Union[RequestProtocolError, ResponseProtocolError]]
ReceiveData: Type[Union[RequestData, ResponseData]]
ReceiveEndOfMessage: Type[Union[RequestEndOfMessage, ResponseEndOfMessage]]
def __init__(self, context: Context, conn: Connection):
super().__init__(context, conn)
self.buf = ReceiveBuffer()
@abc.abstractmethod
def send(self, event: HttpEvent) -> layer.CommandGenerator[None]:
yield from () # pragma: no cover
@abc.abstractmethod
def read_headers(self, event: events.ConnectionEvent) -> layer.CommandGenerator[None]:
yield from () # pragma: no cover
def _handle_event(self, event: events.Event) -> layer.CommandGenerator[None]:
if isinstance(event, HttpEvent):
yield from self.send(event)
else:
if isinstance(event, events.DataReceived) and self.state != self.passthrough:
self.buf += event.data
yield from self.state(event)
@expect(events.Start)
def start(self, _) -> layer.CommandGenerator[None]:
self.state = self.read_headers
yield from ()
state = start
def read_body(self, event: events.Event) -> layer.CommandGenerator[None]:
assert self.stream_id
while True:
try:
if isinstance(event, events.DataReceived):
h11_event = self.body_reader(self.buf)
elif isinstance(event, events.ConnectionClosed):
h11_event = self.body_reader.read_eof()
else:
raise AssertionError(f"Unexpected event: {event}")
except h11.ProtocolError as e:
yield commands.CloseConnection(self.conn)
yield ReceiveHttp(self.ReceiveProtocolError(self.stream_id, f"HTTP/1 protocol error: {e}"))
return
if h11_event is None:
return
elif isinstance(h11_event, h11.Data):
data: bytes = bytes(h11_event.data)
if data:
yield ReceiveHttp(self.ReceiveData(self.stream_id, data))
elif isinstance(h11_event, h11.EndOfMessage):
assert self.request
if h11_event.headers:
                    raise NotImplementedError("HTTP trailers are not implemented yet.")
if self.request.data.method.upper() != b"CONNECT":
yield ReceiveHttp(self.ReceiveEndOfMessage(self.stream_id))
is_request = isinstance(self, Http1Server)
yield from self.mark_done(
request=is_request,
response=not is_request
)
return
def wait(self, event: events.Event) -> layer.CommandGenerator[None]:
"""
We wait for the current flow to be finished before parsing the next message,
as we may want to upgrade to WebSocket or plain TCP before that.
"""
assert self.stream_id
if isinstance(event, events.DataReceived):
return
elif isinstance(event, events.ConnectionClosed):
# for practical purposes, we assume that a peer which sent at least a FIN
            # is not interested in any more data from us,
            # see https://github.com/httpwg/http-core/issues/22
if event.connection.state is not ConnectionState.CLOSED:
yield commands.CloseConnection(event.connection)
            yield ReceiveHttp(self.ReceiveProtocolError(self.stream_id, "Client disconnected.",
                                                        code=status_codes.CLIENT_CLOSED_REQUEST))
else: # pragma: no cover
raise AssertionError(f"Unexpected event: {event}")
def done(self, event: events.ConnectionEvent) -> layer.CommandGenerator[None]:
yield from () # pragma: no cover
def make_pipe(self) -> layer.CommandGenerator[None]:
self.state = self.passthrough
if self.buf:
already_received = self.buf.maybe_extract_at_most(len(self.buf))
yield from self.state(events.DataReceived(self.conn, already_received))
self.buf.compress()
def passthrough(self, event: events.Event) -> layer.CommandGenerator[None]:
assert self.stream_id
if isinstance(event, events.DataReceived):
yield ReceiveHttp(self.ReceiveData(self.stream_id, event.data))
elif isinstance(event, events.ConnectionClosed):
if isinstance(self, Http1Server):
yield ReceiveHttp(RequestEndOfMessage(self.stream_id))
else:
yield ReceiveHttp(ResponseEndOfMessage(self.stream_id))
def mark_done(self, *, request: bool = False, response: bool = False) -> layer.CommandGenerator[None]:
if request:
self.request_done = True
if response:
self.response_done = True
if self.request_done and self.response_done:
assert self.request
assert self.response
if should_make_pipe(self.request, self.response):
yield from self.make_pipe()
return
connection_done = (
http1_sansio.expected_http_body_size(self.request, self.response) == -1
or http1.connection_close(self.request.http_version, self.request.headers)
or http1.connection_close(self.response.http_version, self.response.headers)
# If we proxy HTTP/2 to HTTP/1, we only use upstream connections for one request.
# This simplifies our connection management quite a bit as we can rely on
# the proxyserver's max-connection-per-server throttling.
or (self.request.is_http2 and isinstance(self, Http1Client))
)
if connection_done:
yield commands.CloseConnection(self.conn)
self.state = self.done
return
self.request_done = self.response_done = False
self.request = self.response = None
if isinstance(self, Http1Server):
self.stream_id += 2
else:
self.stream_id = None
self.state = self.read_headers
if self.buf:
yield from self.state(events.DataReceived(self.conn, b""))
class Http1Server(Http1Connection):
"""A simple HTTP/1 server with no pipelining support."""
ReceiveProtocolError = RequestProtocolError
ReceiveData = RequestData
ReceiveEndOfMessage = RequestEndOfMessage
stream_id: int
def __init__(self, context: Context):
super().__init__(context, context.client)
self.stream_id = 1
def send(self, event: HttpEvent) -> layer.CommandGenerator[None]:
assert event.stream_id == self.stream_id
if isinstance(event, ResponseHeaders):
self.response = response = event.response
if response.is_http2:
response = response.copy()
# Convert to an HTTP/1 response.
response.http_version = "HTTP/1.1"
# not everyone supports empty reason phrases, so we better make up one.
response.reason = status_codes.RESPONSES.get(response.status_code, "")
# Shall we set a Content-Length header here if there is none?
# For now, let's try to modify as little as possible.
raw = http1.assemble_response_head(response)
yield commands.SendData(self.conn, raw)
elif isinstance(event, ResponseData):
assert self.response
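            # Chunked framing sketch: b"hello" is emitted below as
            # b"5\r\nhello\r\n"; the stream is later terminated with
            # b"0\r\n\r\n" in the ResponseEndOfMessage branch.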
if "chunked" in self.response.headers.get("transfer-encoding", "").lower():
raw = b"%x\r\n%s\r\n" % (len(event.data), event.data)
else:
raw = event.data
if raw:
yield commands.SendData(self.conn, raw)
elif isinstance(event, ResponseEndOfMessage):
assert self.response
if "chunked" in self.response.headers.get("transfer-encoding", "").lower():
yield commands.SendData(self.conn, b"0\r\n\r\n")
yield from self.mark_done(response=True)
elif isinstance(event, ResponseProtocolError):
if not self.response:
resp = http.make_error_response(event.code, event.message)
raw = http1.assemble_response(resp)
yield commands.SendData(self.conn, raw)
yield commands.CloseConnection(self.conn)
else:
raise AssertionError(f"Unexpected event: {event}")
def read_headers(self, event: events.ConnectionEvent) -> layer.CommandGenerator[None]:
if isinstance(event, events.DataReceived):
request_head = self.buf.maybe_extract_lines()
if request_head:
request_head = [bytes(x) for x in request_head] # TODO: Make url.parse compatible with bytearrays
try:
self.request = http1_sansio.read_request_head(request_head)
expected_body_size = http1_sansio.expected_http_body_size(self.request, expect_continue_as_0=False)
except (ValueError, exceptions.HttpSyntaxException) as e:
yield commands.Log(f"{human.format_address(self.conn.peername)}: {e}")
yield commands.CloseConnection(self.conn)
self.state = self.done
return
yield ReceiveHttp(RequestHeaders(self.stream_id, self.request, expected_body_size == 0))
self.body_reader = make_body_reader(expected_body_size)
self.state = self.read_body
yield from self.state(event)
else:
pass # FIXME: protect against header size DoS
elif isinstance(event, events.ConnectionClosed):
buf = bytes(self.buf)
if buf.strip():
yield commands.Log(f"Client closed connection before completing request headers: {buf!r}")
yield commands.CloseConnection(self.conn)
else:
raise AssertionError(f"Unexpected event: {event}")
def mark_done(self, *, request: bool = False, response: bool = False) -> layer.CommandGenerator[None]:
yield from super().mark_done(request=request, response=response)
if self.request_done and not self.response_done:
self.state = self.wait
class Http1Client(Http1Connection):
"""A simple HTTP/1 client with no pipelining support."""
ReceiveProtocolError = ResponseProtocolError
ReceiveData = ResponseData
ReceiveEndOfMessage = ResponseEndOfMessage
def __init__(self, context: Context):
super().__init__(context, context.server)
def send(self, event: HttpEvent) -> layer.CommandGenerator[None]:
if not self.stream_id:
assert isinstance(event, RequestHeaders)
self.stream_id = event.stream_id
self.request = event.request
assert self.stream_id == event.stream_id
if isinstance(event, RequestHeaders):
request = event.request
if request.is_http2:
# Convert to an HTTP/1 request.
request = request.copy() # (we could probably be a bit more efficient here.)
request.http_version = "HTTP/1.1"
if "Host" not in request.headers and request.authority:
request.headers.insert(0, "Host", request.authority)
request.authority = ""
raw = http1.assemble_request_head(request)
yield commands.SendData(self.conn, raw)
elif isinstance(event, RequestData):
assert self.request
if "chunked" in self.request.headers.get("transfer-encoding", "").lower():
raw = b"%x\r\n%s\r\n" % (len(event.data), event.data)
else:
raw = event.data
if raw:
yield commands.SendData(self.conn, raw)
elif isinstance(event, RequestEndOfMessage):
assert self.request
if "chunked" in self.request.headers.get("transfer-encoding", "").lower():
yield commands.SendData(self.conn, b"0\r\n\r\n")
elif http1_sansio.expected_http_body_size(self.request, self.response) == -1:
yield commands.CloseConnection(self.conn, half_close=True)
yield from self.mark_done(request=True)
elif isinstance(event, RequestProtocolError):
yield commands.CloseConnection(self.conn)
return
else:
raise AssertionError(f"Unexpected event: {event}")
def read_headers(self, event: events.ConnectionEvent) -> layer.CommandGenerator[None]:
if isinstance(event, events.DataReceived):
if not self.request:
# we just received some data for an unknown request.
yield commands.Log(f"Unexpected data from server: {bytes(self.buf)!r}")
yield commands.CloseConnection(self.conn)
return
assert self.stream_id
response_head = self.buf.maybe_extract_lines()
if response_head:
response_head = [bytes(x) for x in response_head] # TODO: Make url.parse compatible with bytearrays
try:
self.response = http1_sansio.read_response_head(response_head)
expected_size = http1_sansio.expected_http_body_size(self.request, self.response)
except (ValueError, exceptions.HttpSyntaxException) as e:
yield commands.CloseConnection(self.conn)
yield ReceiveHttp(ResponseProtocolError(self.stream_id, f"Cannot parse HTTP response: {e}"))
return
yield ReceiveHttp(ResponseHeaders(self.stream_id, self.response, expected_size == 0))
self.body_reader = make_body_reader(expected_size)
self.state = self.read_body
yield from self.state(event)
else:
pass # FIXME: protect against header size DoS
elif isinstance(event, events.ConnectionClosed):
if self.conn.state & ConnectionState.CAN_WRITE:
yield commands.CloseConnection(self.conn)
if self.stream_id:
if self.buf:
yield ReceiveHttp(ResponseProtocolError(self.stream_id,
f"unexpected server response: {bytes(self.buf)!r}"))
else:
# The server has closed the connection to prevent us from continuing.
# We need to signal that to the stream.
# https://tools.ietf.org/html/rfc7231#section-6.5.11
yield ReceiveHttp(ResponseProtocolError(self.stream_id, "server closed connection"))
else:
return
else:
raise AssertionError(f"Unexpected event: {event}")
def should_make_pipe(request: net_http.Request, response: net_http.Response) -> bool:
if response.status_code == 101:
return True
elif response.status_code == 200 and request.method.upper() == "CONNECT":
return True
else:
return False
def make_body_reader(expected_size: Optional[int]) -> TBodyReader:
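    """Pick the h11 body reader for the framing implied by ``expected_size``:
    ``None`` means Transfer-Encoding: chunked, ``-1`` means read until EOF
    (HTTP/1.0 style), and ``N >= 0`` means exactly N bytes (Content-Length).
    """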
if expected_size is None:
return ChunkedReader()
elif expected_size == -1:
return Http10Reader()
else:
return ContentLengthReader(expected_size)
__all__ = [
"Http1Client",
"Http1Server",
]
| mit | 300,163,260,257,517,600 | 45.113208 | 119 | 0.613573 | false |
okolisny/integration_tests | cfme/web_ui/__init__.py | 1 | 143810 | """Provides a number of objects to help with managing certain elements in the CFME UI.
Specifically there are two categories of objects, organizational and elemental.
* **Organizational**
* :py:class:`Region`
* :py:mod:`cfme.web_ui.menu`
* **Elemental**
* :py:class:`AngularCalendarInput`
* :py:class:`AngularSelect`
* :py:class:`ButtonGroup`
* :py:class:`Calendar`
* :py:class:`ColorGroup`
* :py:class:`CheckboxTable`
* :py:class:`CheckboxSelect`
* :py:class:`DHTMLSelect`
* :py:class:`DriftGrid`
* :py:class:`DynamicTable`
* :py:class:`EmailSelectForm`
* :py:class:`Filter`
* :py:class:`Form`
* :py:class:`InfoBlock`
* :py:class:`Input`
* :py:class:`MultiFill`
* :py:class:`Quadicon`
* :py:class:`Radio`
* :py:class:`ScriptBox`
* :py:class:`Select`
* :py:class:`ShowingInputs`
* :py:class:`SplitCheckboxTable`
* :py:class:`SplitTable`
* :py:class:`StatusBox`
* :py:class:`Table`
* :py:class:`Tree`
* :py:mod:`cfme.web_ui.accordion`
* :py:mod:`cfme.web_ui.cfme_exception`
* :py:mod:`cfme.web_ui.expression_editor`
* :py:mod:`cfme.web_ui.flash`
* :py:mod:`cfme.web_ui.form_buttons`
* :py:mod:`cfme.web_ui.jstimelines`
* :py:mod:`cfme.web_ui.listaccordion`
* :py:mod:`cfme.web_ui.menu`
* :py:mod:`cfme.web_ui.mixins`
* :py:mod:`cfme.web_ui.paginator`
* :py:mod:`cfme.web_ui.search`
* :py:mod:`cfme.web_ui.tabstrip`
* :py:mod:`cfme.web_ui.toolbar`
"""
import atexit
import os
import re
import time
import types
from datetime import date
from collections import Sequence, Mapping, Callable, Iterable
from tempfile import NamedTemporaryFile
from xml.sax.saxutils import quoteattr, unescape
from cached_property import cached_property
from selenium.common import exceptions as sel_exceptions
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.remote.file_detector import LocalFileDetector
from multimethods import multimethod, multidispatch, Anything
from widgetastic.xpath import quote
import cfme.fixtures.pytest_selenium as sel
from cfme import exceptions, js
from cfme.fixtures.pytest_selenium import browser
# For backward compatibility with code that pulls in Select from web_ui instead of sel
from cfme.fixtures.pytest_selenium import Select
from cfme.utils import attributize_string, castmap, normalize_space, version
from cfme.utils.log import logger
from cfme.utils.pretty import Pretty
from wait_for import TimedOutError, wait_for
class Selector(object):
"""
Special Selector object allowing object resolution on attr access
The Selector is a simple class which allows a 'super' widget to support multiple
implementations. This is achieved by the use of a ``decide`` method which accesses
attrs of the object set by the ``__init__`` of the child class. These attributes
are then used to decide which type of object is on a page. In some cases, this can
avoid a version pick if the information used to instantiate both old and new implementations
    can be identical. This is most notable when using an "id" which remains constant from
implementation to implementation.
    As an example, imagine the normal "checkbox" is replaced with a fancy new web 2.0
checkbox. Both have an "input" element, and give it the same "id". When the decide method is
invoked, the "id" is inspected and used to determine if it is an old or a new style widget.
We then set a hidden attribute of the super widget and proxy all further attr requests to
that object.
    This means that in order for things to behave as expected, ALL implementations must also expose
the same "public" API.
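
    Usage (illustrative sketch; ``OldCheckbox``/``NewCheckbox`` are hypothetical
    implementations that expose the same public API):

        class SmartCheckbox(Selector):
            def __init__(self, loc):
                super(SmartCheckbox, self).__init__()
                self.loc = loc

            def decide(self):
                if sel.is_displayed('//input[@id="old-style-checkbox"]'):
                    return OldCheckbox(self.loc)
                return NewCheckbox(self.loc)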
"""
def __init__(self):
self._obj = None
def __getattr__(self, name):
if not self._obj:
self._obj = self.decide()
return getattr(self._obj, name)
def decide(self):
raise Exception('This widget does not have a "decide" method which is mandatory')
class Region(Pretty):
"""
Base class for all UI regions/pages
Args:
locators: A dict of locator objects for the given region
title: A string containing the title of the page,
or a versioned dict of page title strings
identifying_loc: Single locator key from locators used by :py:meth:`Region.is_displayed`
to check if the region is currently visible
Usage:
page = Region(locators={
'configuration_button': (By.CSS_SELECTOR, "div.dhx_toolbar_btn[title='Configuration']"),
'discover_button': (By.CSS_SELECTOR,
"tr[title='Discover Cloud Providers']>td.td_btn_txt>" "div.btn_sel_text")
},
title='Cloud Providers',
identifying_loc='discover_button'
)
The elements can then accessed like so::
page.configuration_button
Locator attributes will return the locator tuple for that particular element,
and can be passed on to other functions, such as :py:func:`element` and :py:func:`click`.
Note:
When specifying a region title, omit the "Cloudforms Management Engine: " or "ManageIQ: "
prefix. They are included on every page, and different for the two versions of the
appliance, and :py:meth:`is_displayed` strips them off before checking for equality.
"""
pretty_attrs = ['title']
def __getattr__(self, name):
if hasattr(self, 'locators') and name in self.locators:
locator = self.locators[name]
if isinstance(locator, dict):
return version.pick(locator)
else:
return locator
else:
raise AttributeError("Region has no attribute named " + name)
def __init__(self, locators=None, title=None, identifying_loc=None, **kwargs):
self.locators = locators
self.identifying_loc = identifying_loc
self._title = title
self.infoblock = InfoBlock # Legacy support
@property
def title(self):
# support title being a versioned dict
if isinstance(self._title, dict):
self._title = version.pick(self._title)
return self._title
def is_displayed(self):
"""
Checks to see if the region is currently displayed.
Returns: A boolean describing if the region is currently displayed
"""
if not self.identifying_loc and not self.title:
            logger.warning("Region doesn't have an identifying locator or title, "
                           "can't determine if this is the current page.")
return True
# All page titles have a prefix; strip it off
window_title = browser_title()
if self.identifying_loc and sel.is_displayed(
self.locators[self.identifying_loc], _no_deeper=True):
ident_match = True
else:
if not self.title:
logger.info('Identifying locator for region not found')
else:
logger.info('Identifying locator for region %s not found', self.title)
ident_match = False
if self.title is None:
# If we don't have a title we can't match it, and some Regions are multi-page
# so we can't have a title set.
title_match = True
elif self.title and window_title == self.title:
title_match = True
else:
logger.info("Title %s doesn't match expected title %s", window_title, self.title)
title_match = False
return title_match and ident_match
def get_context_current_page():
"""
Returns the current page name
Returns: A string containing the current page name
"""
url = browser().current_url()
    # Strip the scheme as a prefix: str.lstrip('https://') strips *characters*,
    # not the prefix, and could eat leading characters of the hostname, so use a
    # regex instead, e.g. 'https://host/some/page?p=1' -> 'host/some/page?p=1'.
    stripped = re.sub(r'^https?://', '', url)
return stripped[stripped.find('/'):stripped.rfind('?')]
class CachedTableHeaders(object):
"""the internal cache of headers
    Caching the header cell elements and their indexes allows columns to be looked
    up by name cheaply. The cell elements are stored in :py:attr:`headers` and a
    mapping of attributized header text to column index in :py:attr:`indexes`;
    :py:meth:`Table.verify_headers` compares a freshly built cache against this one
    to detect moved columns or shared table instances.
"""
def __init__(self, table):
self.headers = sel.elements('td | th', root=table.header_row)
self.indexes = {
attributize_string(cell.text): index
for index, cell in enumerate(self.headers)}
class Table(Pretty):
"""
Helper class for Table/List objects
Turns CFME custom Table/Lists into iterable objects using a generator.
Args:
table_locator: locator pointing to a table element with child thead and tbody elements
representing that table's header and body row containers
header_offset: In the case of a padding table row above the header, the row offset
can be used to skip rows in ``<thead>`` to locate the correct header row. This offset
            is 0-indexed, so an offset of 0 selects the first child row element
body_offset: In the case of a padding table row above the body rows, the row offset
            can be used to skip rows in ``<tbody>`` to locate the correct body row. This offset
            is 0-indexed, so an offset of 0 selects the first child row element
        hidden_locator: If the table can disappear, you probably want to set this param, as it
instructs the table that if it cannot find the table on the page but the element
represented by ``hidden_locator`` is visible, it assumes no data and returns no rows.
Attributes:
header_indexes: A dict of header names related to their int index as a column.
Usage:
table = Table('//div[@id="prov_pxe_img_div"]//table')
The HTML code for the table looks something like this::
<div id="prov_pxe_img_div">
<table>
<thead>
<tr>
<th>Name</th>
<th>Animal</th>
<th>Size</th>
</tr>
</thead>
<tbody>
<tr>
<td>John</td>
<td>Monkey</td>
<td>Small</td>
</tr>
<tr>
<td>Mike</td>
<td>Tiger</td>
<td>Large</td>
</tr>
</tbody>
</table>
</div>
We can now click on an element in the list like so, by providing the column
name and the value that we are searching for::
table.click_cell('name', 'Mike')
We can also perform the same, by using the index of the column, like so::
table.click_cell(1, 'Tiger')
Additionally, the rows of a table can be iterated over, and that row's columns can be accessed
by name or index (left to right, 0-index)::
for row in table.rows()
# Get the first cell in the row
row[0]
# Get the row's contents for the column with header 'Row Name'
# All of these will work, though the first is preferred
row.row_name, row['row_name'], row['Row Name']
    When doing bulk operations, such as selecting rows in a table based on their content,
    the ``*_by_cells`` methods are able to find matching rows much more quickly than iterating,
as the work can be done with fewer selenium calls.
* :py:meth:`find_rows_by_cells`
* :py:meth:`find_row_by_cells`
* :py:meth:`click_rows_by_cells`
* :py:meth:`click_row_by_cells`
Note:
A table is defined by the containers of the header and data areas, and offsets to them.
This allows a table to include one or more padding rows above the header row. In
the example above, there is no padding row, as our offset values are set to 0.
"""
pretty_attrs = ['_loc']
def __init__(self, table_locator, header_offset=0, body_offset=0, hidden_locator=None):
self._headers = None
self._header_indexes = None
self._loc = table_locator
self.header_offset = int(header_offset)
self.body_offset = int(body_offset)
self.hidden_locator = hidden_locator
@property
def header_row(self):
"""Property representing the ``<tr>`` element that contains header cells"""
# thead/tr containing header data
# xpath is 1-indexed, so we need to add 1 to the offset to get the correct row
return sel.element('./thead/tr[{}]'.format(self.header_offset + 1), root=sel.element(self))
@property
def body(self):
"""Property representing the ``<tbody>`` element that contains body rows"""
# tbody containing body rows
return sel.element('./tbody', root=sel.element(self))
@cached_property
def _headers_cache(self):
return CachedTableHeaders(self)
def verify_headers(self):
"""Verifies whether the headers in the table correspond with the cached ones."""
current_headers = CachedTableHeaders(self)
cached_headers = self._headers_cache
if current_headers.indexes != cached_headers.indexes:
raise exceptions.UsingSharedTables(
('{cn} suspects that you are using shared tables! '
'That means you are using one {cn} instance to represent different UI tables. '
'This is not possible due to the header caching, but also wrong from the '
'design point of view. Please, create separate instances of {cn} for EACH table '
'in the user interface.').format(cn=type(self).__name__))
    def _update_cache(self):
        """Refresh the cache in case we know it's stale."""
try:
del self._headers_cache
except AttributeError:
            pass  # it's not cached, don't try to be eager
else:
self._headers_cache
@property
def headers(self):
"""List of ``<td>`` or ``<th>`` elements in :py:attr:`header_row`
"""
return self._headers_cache.headers
@property
def header_indexes(self):
"""Dictionary of header name: column index for this table's rows
Derived from :py:attr:`headers`
"""
return self._headers_cache.indexes
def locate(self):
return sel.move_to_element(self._loc)
@property
def _root_loc(self):
return self.locate()
def rows(self):
"""A generator method holding the Row objects
This generator yields Row objects starting at the first data row.
Yields:
:py:class:`Table.Row` object corresponding to the next row in the table.
"""
try:
index = self.body_offset
row_elements = sel.elements('./tr', root=self.body)
for row_element in row_elements[index:]:
yield self.create_row_from_element(row_element)
except (exceptions.CannotScrollException, NoSuchElementException):
if self.hidden_locator is None:
# No hiding is documented here, so just explode
raise
elif not sel.is_displayed(self.hidden_locator):
# Hiding is documented but the element that signalizes that it is all right is not
# present so explode too.
raise
else:
# The table is not present but there is something that signalizes it is all right
# but no data.
return
def rows_as_list(self):
"""Returns rows as list"""
return [i for i in self.rows()]
def row_count(self):
"""Returns row count"""
return len(self.rows_as_list())
def find_row(self, header, value):
"""
Finds a row in the Table by iterating through each visible item.
Args:
header: A string or int, describing which column to inspect.
value: The value to be compared when trying to identify the correct row
to return.
Returns:
:py:class:`Table.Row` containing the requested cell, else ``None``.
"""
return self.find_row_by_cells({header: value})
def find_cell(self, header, value):
"""
        Finds an item in the Table by iterating through each visible item.
        This work used to be done by the :py:meth:`click_cell` method, but
        has now been abstracted out so it can be called separately.
Args:
header: A string or int, describing which column to inspect.
value: The value to be compared when trying to identify the correct cell
to click.
Returns: WebElement of the element if item was found, else ``None``.
"""
matching_cell_rows = self.find_rows_by_cells({header: value})
try:
if isinstance(header, basestring):
return getattr(matching_cell_rows[0], header)
else:
return matching_cell_rows[0][header]
except IndexError:
return None
def find_rows_by_cells(self, cells, partial_check=False):
"""A fast row finder, based on cell content.
If you pass a regexp as a value, then it will be used with its ``.match()`` method.
Args:
cells: A dict of ``header: value`` pairs or a sequence of
nested ``(header, value)`` pairs.
            partial_check: Whether to use the ``in`` operator rather than ``==``.
        Returns: A list containing :py:class:`Table.Row` objects whose contents
            match all of the header: value pairs in ``cells``
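
        Usage (illustrative sketch; header names and values are assumptions):

            # exact match on two columns
            rows = table.find_rows_by_cells({'name': 'Mike', 'size': 'Large'})
            # a compiled regexp is applied with its ``.match()`` method
            rows = table.find_rows_by_cells({'name': re.compile(r'^Mi')})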
"""
# accept dicts or supertuples
cells = dict(cells)
cell_text_loc = (
'.//td/descendant-or-self::*[contains(normalize-space(text()), "{}")]/ancestor::tr[1]')
matching_rows_list = list()
for value in cells.values():
# Get all td elements that contain the value text
matching_elements = sel.elements(cell_text_loc.format(value),
root=sel.move_to_element(self._root_loc))
if matching_elements:
matching_rows_list.append(set(matching_elements))
# Now, find the common row elements that matched all the input cells
# (though not yet matching values to headers)
if not matching_rows_list:
# If none matched, short out
return []
rows_elements = list(reduce(lambda set1, set2: set1 & set2, matching_rows_list))
# Convert them to rows
# This is slow, which is why we do it after reducing the row element pile,
# and not when building matching_rows_list, but it makes comparing header
# names and expected values easy
rows = [self.create_row_from_element(element) for element in rows_elements]
# Only include rows where the expected values are in the right columns
matching_rows = list()
def matching_row_filter(heading, value):
text = normalize_space(row[heading].text)
if isinstance(value, re._pattern_type):
return value.match(text) is not None
elif partial_check:
return value in text
else:
return text == value
for row in rows:
if all(matching_row_filter(*cell) for cell in cells.items()):
matching_rows.append(row)
return matching_rows
def find_row_by_cells(self, cells, partial_check=False):
"""Find the first row containing cells
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
Returns: The first matching row found, or None if no matching row was found
"""
try:
rows = self.find_rows_by_cells(cells, partial_check=partial_check)
return rows[0]
except IndexError:
return None
def click_rows_by_cells(self, cells, click_column=None, partial_check=False):
"""Click the cell at ``click_column`` in the rows matched by ``cells``
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
click_column: Which column in the row to click, defaults to None,
which will attempt to click the row element
Note:
The value of click_column can be a string or an int, and will be passed directly to
the item accessor (``__getitem__``) for :py:class:`Table.Row`
"""
rows = self.find_rows_by_cells(cells, partial_check=partial_check)
if click_column is not None:
rows = [row[click_column] for row in rows]
for row in rows:
if row is None:
self.verify_headers() # Suspected shared table use
sel.click(row)
def click_row_by_cells(self, cells, click_column=None, partial_check=False):
"""Click the cell at ``click_column`` in the first row matched by ``cells``
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
click_column: See :py:meth:`Table.click_rows_by_cells`
"""
row = self.find_row_by_cells(cells, partial_check=partial_check)
if row is None:
raise NameError('No row matching {} found'.format(repr(cells)))
elif click_column is not None:
row = row[click_column]
if row is None:
self.verify_headers() # Suspected shared table use
sel.click(row)
def create_row_from_element(self, row_element):
"""Given a row element in this table, create a :py:class:`Table.Row`
Args:
row_element: A table row (``<tr>``) WebElement representing a row in this table.
Returns: A :py:class:`Table.Row` for ``row_element``
"""
return Table.Row(row_element, self)
def click_cells(self, cell_map):
"""Submits multiple cells to be clicked on
Args:
cell_map: A mapping of header names and values, representing cells to click.
As an example, ``{'name': ['wing', 'nut']}, {'age': ['12']}`` would click on
the cells which had ``wing`` and ``nut`` in the name column and ``12`` in
the age column. The yaml example for this would be as follows::
list_items:
name:
- wing
- nut
age:
- 12
Raises:
NotAllItemsClicked: If some cells were unable to be found.
"""
failed_clicks = []
for header, values in cell_map.items():
if isinstance(values, basestring):
values = [values]
for value in values:
res = self.click_cell(header, value)
if not res:
failed_clicks.append("{}:{}".format(header, value))
if failed_clicks:
raise exceptions.NotAllItemsClicked(failed_clicks)
def click_cell(self, header, value):
"""Clicks on a cell defined in the row.
Uses the header identifier and a value to determine which cell to click on.
Args:
header: A string or int, describing which column to inspect.
value: The value to be compared when trying to identify the correct cell
to click the cell in.
Returns: ``True`` if item was found and clicked, else ``False``.
"""
cell = self.find_cell(header, value)
if cell:
sel.click(cell)
return True
else:
# This *might* lead to the shared table. So be safe here.
self.verify_headers()
return False
class Row(Pretty):
"""An object representing a row in a Table.
        The Row object returns a dynamically addressable attribute space, with
        attribute names generated automatically from the table's headers.
Args:
row_element: A table row ``WebElement``
parent_table: :py:class:`Table` containing ``row_element``
Notes:
Attributes are dynamically generated. The index/key accessor is more flexible
than the attr accessor, as it can operate on int indices and header names.
"""
pretty_attrs = ['row_element', 'table']
def __init__(self, row_element, parent_table):
self.table = parent_table
self.row_element = row_element
@property
def columns(self):
"""A list of WebElements corresponding to the ``<td>`` elements in this row"""
return sel.elements('./td', root=self.row_element)
def __getattr__(self, name):
"""
Returns Row element by header name
"""
try:
return self.columns[self.table.header_indexes[attributize_string(name)]]
except (KeyError, IndexError):
# Suspected shared table use
self.table.verify_headers()
# If it did not fail at that time, reraise
raise
def __getitem__(self, index):
"""
Returns Row element by header index or name
"""
try:
return self.columns[index]
except TypeError:
# Index isn't an int, assume it's a string
return getattr(self, attributize_string(index))
except IndexError:
# Suspected shared table use
self.table.verify_headers()
# If it did not fail at that time, reraise
raise
def __str__(self):
return ", ".join(["'{}'".format(el.text) for el in self.columns])
def __eq__(self, other):
if isinstance(other, type(self)):
# Selenium elements support equality checks, so we can, too.
return self.row_element == other.row_element
else:
return id(self) == id(other)
def locate(self):
# table.create_row_from_element(row_instance) might actually work...
return sel.move_to_element(self.row_element)
class CAndUGroupTable(Table):
"""Type of tables used in C&U, not tested in others.
    Provides a ``.groups()`` generator which yields group objects. A group object
    consists of the rows that are located in the group plus the summary information.
    The main principle is that all the rows inside a group are stored in the group
    object's ``.rows``, and when the script encounters the end of the group, it stores
    the summary data that follows the data rows as attributes, so e.g. ``Totals:``
    becomes ``group.totals``. All the rows are represented as dictionaries.
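
    Usage (illustrative sketch; the locator is an assumption):

        table = CAndUGroupTable('//div[@id="report_html_div"]//table')
        for group in table.groups():
            print group.id, len(group.rows)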
"""
class States:
NORMAL_ROWS = 0
GROUP_SUMMARY = 1
class Group(object):
def __init__(self, group_id, headers, rows, info_rows):
self.id = group_id
self.rows = [dict(zip(headers, row)) for row in rows]
info_headers = headers[1:]
for info_row in info_rows:
name = info_row[0]
rest = info_row[1:]
data = dict(zip(info_headers, rest))
group_attr = attributize_string(name)
setattr(self, group_attr, data)
def __repr__(self):
            return '<CAndUGroupTable.Group {}>'.format(repr(self.id))
def paginated_rows(self):
from cfme.web_ui import paginator
for page in paginator.pages():
for row in self.rows():
yield row
def find_group(self, group_id):
"""Finds a group by its group ID (the string that is alone on the line)"""
for group in self.groups():
if group.id == group_id:
                return group
else:
raise KeyError('Group {} not found'.format(group_id))
def groups(self):
headers = map(sel.text, self.headers)
headers_length = len(headers)
rows = self.paginated_rows()
current_group_rows = []
current_group_summary_rows = []
current_group_id = None
state = self.States.NORMAL_ROWS
while True:
try:
row = rows.next()
except StopIteration:
if state == self.States.GROUP_SUMMARY:
row = None
else:
break
if state == self.States.NORMAL_ROWS:
if len(row.columns) == headers_length:
current_group_rows.append(tuple(map(sel.text, row.columns)))
else:
# Transition to the group summary
current_group_id = sel.text(row.columns[0]).strip()
state = self.States.GROUP_SUMMARY
elif state == self.States.GROUP_SUMMARY:
# row is None == we are at the end of the table so a slightly different behaviour
if row is not None:
fc_length = len(sel.text(row.columns[0]).strip())
if row is None or fc_length == 0:
# Done with group
yield self.Group(
current_group_id, headers, current_group_rows, current_group_summary_rows)
current_group_rows = []
current_group_summary_rows = []
current_group_id = None
state = self.States.NORMAL_ROWS
else:
current_group_summary_rows.append(tuple(map(sel.text, row.columns)))
else:
raise RuntimeError('This should never happen')
if current_group_id is not None or current_group_rows or current_group_summary_rows:
raise ValueError(
'GroupTable could not be parsed properly: {} {} {}'.format(
current_group_id, repr(current_group_rows), repr(current_group_summary_rows)))
class SplitTable(Table):
""":py:class:`Table` that supports the header and body rows being in separate tables
Args:
header_data: A tuple, containing an element locator and an offset value.
These point to the container of the header row. The offset is used in case
there is a padding row above the header, or in the case that the header
and the body are contained inside the same table element.
body_data: A tuple, containing an element locator and an offset value.
These point to the container of the body rows. The offset is used in case
there is a padding row above the body rows, or in the case that the header
and the body are contained inside the same table element.
Usage:
table = SplitTable(header_data=('//div[@id="header_table"]//table/tbody', 0),
body_data=('//div[@id="body_table"]//table/tbody', 1))
The HTML code for a split table looks something like this::
<div id="prov_pxe_img_div">
<table id="header_table">
<tbody>
<tr>
<td>Name</td>
<td>Animal</td>
<td>Size</td>
</tr>
</tbody>
</table>
<table id="body_table">
<tbody>
<tr>
<td>Useless</td>
<td>Padding</td>
<td>Row</td>
</tr>
<tr>
<td>John</td>
<td>Monkey</td>
<td>Small</td>
</tr>
<tr>
<td>Mike</td>
<td>Tiger</td>
<td>Large</td>
</tr>
</tbody>
</table>
</div>
Note the use of the offset to skip the "Useless Padding Row" in ``body_data``. Most split
tables require an offset for both the heading and body rows.
"""
def __init__(self, header_data, body_data):
self._header_loc, header_offset = header_data
self._body_loc, body_offset = body_data
self.header_offset = int(header_offset)
self.body_offset = int(body_offset)
@property
def _root_loc(self):
return self._body_loc
@property
def header_row(self):
"""Property representing the ``<tr>`` element that contains header cells"""
# thead/tr containing header data
# xpath is 1-indexed, so we need to add 1 to the offset to get the correct row
return sel.element(
'tr[{}]'.format(self.header_offset + 1), root=sel.element(self._header_loc))
@property
def body(self):
"""Property representing the element that contains body rows"""
# tbody containing body rows
return sel.element(self._body_loc)
def locate(self):
# Use the header locator as the overall table locator
return sel.move_to_element(self._header_loc)
class SortTable(Table):
"""This table is the same as :py:class:`Table`, but with added sorting functionality."""
SORT_CELL = './th[./div/i[contains(@class, "fa-sort")] or contains(@class, "sorting_")]'
SORT_LINK = './th/a[normalize-space(.)={}]'
@property
def _sort_by_cell(self):
try:
return sel.element(self.SORT_CELL, root=self.header_row)
except NoSuchElementException:
return None
@property
def sorted_by(self):
"""Return column name what is used for sorting now.
"""
cell = self._sort_by_cell
if cell is None:
return None
return sel.text("./a", root=cell).encode("utf-8")
@property
def sort_order(self):
"""Return order.
Returns: 'ascending' or 'descending'
"""
cell = self._sort_by_cell
if cell is None:
return None
try:
# Newer type
el = sel.element('./div/i[contains(@class, "fa-sort")]', root=cell)
except NoSuchElementException:
# Older type
el = cell
cls = sel.get_attribute(el, "class")
if "fa-sort-asc" in cls or 'sorting_asc' in cls:
return "ascending"
elif "fa-sort-desc" in cls or 'sorting_desc' in cls:
return "descending"
else:
return None
def click_header_cell(self, text):
"""Clicks on the header to change sorting conditions.
Args:
text: Header cell text.
"""
sel.click(sel.element(self.SORT_LINK.format(quoteattr(text)), root=self.header_row))
def sort_by(self, header, order):
"""Sorts the table by given conditions
Args:
header: Text of the header cell to use for sorting.
order: ascending or descending
"""
order = order.lower().strip()
if header != self.sorted_by:
# Change column to order by
self.click_header_cell(header)
if self.sorted_by != header:
raise Exception(
"Detected malfunction in table ordering (wanted {}, got {})".format(
header, self.sorted_by))
if order != self.sort_order:
# Change direction
self.click_header_cell(header)
if self.sort_order != order:
raise Exception("Detected malfunction in table ordering (wanted {}, got {})".format(
order, self.sort_order))
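# Illustrative sketch of SortTable (locator and column name are assumed):
#
#     records = SortTable('//div[@id="records_div"]//table')
#     records.sort_by('Name', 'ascending')
#     assert records.sorted_by == 'Name'
#     assert records.sort_order == 'ascending'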
class CheckboxTable(Table):
""":py:class:`Table` with support for checkboxes
Args:
table_locator: See :py:class:`cfme.web_ui.Table`
header_checkbox_locator: Locator of header checkbox (default `None`)
Specify in case the header checkbox is not part of the header row
body_checkbox_locator: Locator for checkboxes in body rows
header_offset: See :py:class:`cfme.web_ui.Table`
body_offset: See :py:class:`cfme.web_ui.Table`
"""
_checkbox_loc = ".//input[@type='checkbox']"
def __init__(self, table_locator, header_offset=0, body_offset=0,
header_checkbox_locator=None, body_checkbox_locator=None):
super(CheckboxTable, self).__init__(table_locator, header_offset, body_offset)
if body_checkbox_locator:
self._checkbox_loc = body_checkbox_locator
self._header_checkbox_loc = header_checkbox_locator
@property
def header_checkbox(self):
"""Checkbox used to select/deselect all rows"""
if self._header_checkbox_loc is not None:
return sel.element(self._header_checkbox_loc)
else:
return sel.element(self._checkbox_loc, root=self.header_row)
def select_all(self):
"""Select all rows using the header checkbox or one by one if not present"""
if self._header_checkbox_loc is None:
for row in self.rows():
self._set_row_checkbox(row, True)
else:
sel.uncheck(self.header_checkbox)
sel.check(self.header_checkbox)
def deselect_all(self):
"""Deselect all rows using the header checkbox or one by one if not present"""
if self._header_checkbox_loc is None:
for row in self.rows():
self._set_row_checkbox(row, False)
else:
sel.check(self.header_checkbox)
sel.uncheck(self.header_checkbox)
def _set_row_checkbox(self, row, set_to=False):
row_checkbox = sel.element(self._checkbox_loc, root=row.locate())
sel.checkbox(row_checkbox, set_to)
def _set_row(self, header, value, set_to=False):
""" Internal method used to select/deselect a row by column header and cell value
Args:
header: See :py:meth:`Table.find_row`
value: See :py:meth:`Table.find_row`
set_to: Select if `True`, deselect if `False`
"""
row = self.find_row(header, value)
if row:
self._set_row_checkbox(row, set_to)
return True
else:
return False
def select_rows_by_indexes(self, *indexes):
"""Select rows specified by row indexes (starting with 0)
"""
for i, row in enumerate(self.rows()):
if i in indexes:
self._set_row_checkbox(row, True)
def deselect_rows_by_indexes(self, *indexes):
"""Deselect rows specified by row indexes (starting with 0)
"""
for i, row in enumerate(self.rows()):
if i in indexes:
self._set_row_checkbox(row, False)
def select_row(self, header, value):
"""Select a single row specified by column header and cell value
Args:
header: See :py:meth:`Table.find_row`
value: See :py:meth:`Table.find_row`
Returns: `True` if successful, `False` otherwise
"""
return self._set_row(header, value, True)
def deselect_row(self, header, value):
"""Deselect a single row specified by column header and cell value
Args:
header: See :py:meth:`Table.find_row`
value: See :py:meth:`Table.find_row`
Returns: `True` if successful, `False` otherwise
"""
return self._set_row(header, value, False)
def _set_rows(self, cell_map, set_to=False):
""" Internal method used to select/deselect multiple rows
Args:
cell_map: See :py:meth:`Table.click_cells`
set_to: Select if `True`, deselect if `False`
"""
failed_selects = []
for header, values in cell_map.items():
if isinstance(values, basestring):
values = [values]
for value in values:
res = self._set_row(header, value, set_to)
if not res:
failed_selects.append("{}:{}".format(header, value))
if failed_selects:
raise exceptions.NotAllCheckboxesFound(failed_selects)
def select_rows(self, cell_map):
"""Select multiple rows
Args:
cell_map: See :py:meth:`Table.click_cells`
Raises:
NotAllCheckboxesFound: If some cells were unable to be found
"""
self._set_rows(cell_map, True)
def deselect_rows(self, cell_map):
"""Deselect multiple rows
Args:
cell_map: See :py:meth:`Table.click_cells`
Raises:
NotAllCheckboxesFound: If some cells were unable to be found
"""
self._set_rows(cell_map, False)
def _set_row_by_cells(self, cells, set_to=False, partial_check=False):
row = self.find_row_by_cells(cells, partial_check=partial_check)
if row:
self._set_row_checkbox(row, set_to)
else:
raise sel_exceptions.NoSuchElementException()
def select_row_by_cells(self, cells, partial_check=False):
"""Select the first row matched by ``cells``
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
"""
self._set_row_by_cells(cells, True, partial_check)
def deselect_row_by_cells(self, cells, partial_check=False):
"""Deselect the first row matched by ``cells``
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
"""
self._set_row_by_cells(cells, False, partial_check)
def _set_rows_by_cells(self, cells, set_to=False, partial_check=False):
rows = self.find_rows_by_cells(cells, partial_check=partial_check)
for row in rows:
self._set_row_checkbox(row, set_to)
def select_rows_by_cells(self, cells, partial_check=False):
"""Select the rows matched by ``cells``
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
"""
self._set_rows_by_cells(cells, True, partial_check)
def deselect_rows_by_cells(self, cells, partial_check=False):
"""Deselect the rows matched by ``cells``
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
"""
self._set_rows_by_cells(cells, False, partial_check)
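# Illustrative sketch of CheckboxTable selection (locator and cell values
# are hypothetical):
#
#     vm_table = CheckboxTable('//div[@id="list_grid"]//table')
#     vm_table.select_row('Name', 'test_vm_01')    # one row by header/value
#     vm_table.select_rows({'Power State': 'on'})  # several rows via cell map
#     vm_table.deselect_all()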
class SplitCheckboxTable(SplitTable, CheckboxTable):
""":py:class:`SplitTable` with support for checkboxes
Args:
header_data: See :py:class:`cfme.web_ui.SplitTable`
body_data: See :py:class:`cfme.web_ui.SplitTable`
header_checkbox_locator: See :py:class:`cfme.web_ui.CheckboxTable`
body_checkbox_locator: See :py:class:`cfme.web_ui.CheckboxTable`
header_offset: See :py:class:`cfme.web_ui.Table`
body_offset: See :py:class:`cfme.web_ui.Table`
"""
_checkbox_loc = './/img[contains(@src, "item_chk")]'
def __init__(self, header_data, body_data,
header_checkbox_locator=None, body_checkbox_locator=None):
# To limit multiple inheritance surprises, explicitly call out to SplitTable's __init__
SplitTable.__init__(self, header_data, body_data)
# ...then set up CheckboxTable's locators here
self._header_checkbox_loc = header_checkbox_locator
if body_checkbox_locator:
self._checkbox_loc = body_checkbox_locator
class PagedTable(Table):
""":py:class:`Table` with support for paginator
Args:
table_locator: See :py:class:`cfme.web_ui.Table`
header_checkbox_locator: Locator of header checkbox (default `None`)
Specify in case the header checkbox is not part of the header row
body_checkbox_locator: Locator for checkboxes in body rows
header_offset: See :py:class:`cfme.web_ui.Table`
body_offset: See :py:class:`cfme.web_ui.Table`
"""
def find_row_on_all_pages(self, header, value):
from cfme.web_ui import paginator
for _ in paginator.pages():
sel.wait_for_element(self)
row = self.find_row(header, value)
if row is not None:
return row
def find_row_by_cell_on_all_pages(self, cells):
"""Find the first row containing cells on all pages
Args:
cells: See :py:meth:`Table.find_rows_by_cells`
Returns: The first matching row found on any page
"""
from cfme.web_ui import paginator
for _ in paginator.pages():
sel.wait_for_element(self)
row = self.find_row_by_cells(cells)
if row is not None:
return row
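# Illustrative sketch of a cross-page lookup with PagedTable (locator and
# cell data are hypothetical); both finders return None when nothing matches:
#
#     paged = PagedTable('//div[@id="list_grid"]//table')
#     row = paged.find_row_on_all_pages('Name', 'test_vm_01')
#     if row is not None:
#         sel.click(row)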
def table_in_object(table_title):
"""If you want to point to tables inside object view, this is what you want to use.
Works both on down- and upstream.
Args:
table_title: Text in the `p` or `h3` element preceding the table
Returns: XPath locator for the desired table.
"""
return ("//table[(preceding-sibling::p[1] | preceding-sibling::h3[1])[normalize-space(.)={}]]"
.format(quoteattr(table_title)))
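# Illustrative usage of table_in_object (the title below is hypothetical):
#
#     relationships = Table(table_in_object('Relationships'))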
@multimethod(lambda loc, value: (sel.tag(loc), sel.get_attribute(loc, 'type')))
def fill_tag(loc, value):
""" Return a tuple of function to do the filling, and a value to log."""
raise NotImplementedError("Don't know how to fill {} into this type: {}".format(value, loc))
@fill_tag.method(("select", Anything))
def fill_select_tag(select, value):
return (sel.select, value)
@fill_tag.method((Anything, 'text'))
@fill_tag.method((Anything, 'textarea'))
def fill_text(textbox, val):
return (sel.set_text, val)
@fill_tag.method((Anything, 'number'))
def fill_number(bmbox, val):
return (sel.set_text, val)
@fill_tag.method((Anything, 'password'))
def fill_password(pwbox, password):
return (sel.set_text, "********")
@fill_tag.method(('a', Anything))
@fill_tag.method(('img', Anything))
@fill_tag.method((Anything, 'image'))
@fill_tag.method((Anything, 'submit'))
def fill_click(el, val):
"""Click only when given a truthy value"""
def click_if(e, v):
if v:
sel.click(e)
return (click_if, val)
@fill_tag.method((Anything, 'file'))
def fill_file(fd, val):
return (sel.send_keys, val)
@fill_tag.method((Anything, 'checkbox'))
def fill_checkbox(cb, val):
return (sel.checkbox, bool(val))
@multidispatch
def fill(loc, content, **kwargs):
"""
Fills in a UI component with the given content.
Usage:
fill(textbox, "text to fill")
fill(myform, [ ... data to fill ...])
fill(radio, "choice to select")
Returns: True if any UI action was taken, False otherwise
"""
action, logval = fill_tag(loc, content)
if hasattr(loc, 'name'):
ident = loc.name
else:
ident = loc
logger.debug(' Filling in [%s], with value %s', ident, logval)
prev_state = action(loc, content)
sel.detect_observed_field(loc)
return prev_state
@fill.method((Mapping, Anything))
def _version_pick(m, a, **kwargs):
return fill(version.pick(m), a, **kwargs)
@fill.method((Table, Mapping))
def _sd_fill_table(table, cells):
""" How to fill a table with a value (by selecting the value as cells in the table)
See Table.click_cells
"""
table._update_cache()
logger.debug(' Clicking Table cell')
table.click_cells(cells)
return bool(cells)
@fill.method((CheckboxTable, object))
def _sd_fill_checkboxtable(table, cells):
""" How to fill a checkboxtable with a value (by selecting the right rows)
See CheckboxTable.select_rows
"""
table._update_cache()
logger.debug(' Selecting CheckboxTable row')
table.select_rows(cells)
return bool(cells)
@fill.method((Callable, object))
def fill_callable(f, val):
"""Fill in a Callable by just calling it with the value, allow for arbitrary actions"""
return f(val)
@fill.method((Select, types.NoneType))
@fill.method((Select, object))
def fill_select(slist, val):
logger.debug(' Filling in {} with value {}'.format(str(slist), val))
prev_sel = sel.select(slist, val)
slist.observer_wait()
return prev_sel
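# Illustrative sketch of the fill() dispatch (locators are hypothetical):
# plain elements are routed through fill_tag() by tag/type attribute, while
# registered overrides such as (Select, object) take precedence.
#
#     fill(Input('name'), 'my-provider')                 # fill_tag -> fill_text
#     fill(Input('enabled'), True)                       # fill_tag -> fill_checkbox
#     fill(Select('//select[@id="type"]'), 'OpenStack')  # -> fill_select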
class Calendar(Pretty):
"""A CFME calendar form field
Calendar fields are readonly, and managed by the dhtmlxCalendar widget. A Calendar field
will accept any object that can be coerced into a string, but the value may not match the format
expected by dhtmlxCalendar or CFME. For best results, either a ``datetime.date`` or
``datetime.datetime`` object should be used to create a valid date field.
Args:
name: "name" property of the readonly calendar field.
Usage:
calendar = web_ui.Calendar("miq_date_1")
web_ui.fill(calendar, date(2000, 1, 1))
web_ui.fill(calendar, '1/1/2001')
"""
def __init__(self, name):
self.name = name
def locate(self):
return sel.move_to_element(Input(self.name))
@fill.method((Calendar, object))
def _sd_fill_date(calendar, value):
input = sel.element(calendar)
if isinstance(value, date):
date_str = '{}/{}/{}'.format(value.month, value.day, value.year)
else:
date_str = str(value)
# need to write to a readonly field: resort to evil
if sel.get_attribute(input, 'ng-model') is not None:
sel.set_angularjs_value(input, date_str)
else:
sel.set_attribute(input, "value", date_str)
# Now when we set the value, we need to simulate a change event.
if sel.get_attribute(input, "data-date-autoclose"):
# New one
script = "$(\"#{}\").trigger('changeDate');"
else:
# Old one
script = (
"if(typeof $j == 'undefined') {var jq = $;} else {var jq = $j;} "
"jq(\"#{}\").change();")
try:
sel.execute_script(script.format(calendar.name))
except sel_exceptions.WebDriverException as e:
logger.warning(
"An exception was raised during handling of the Cal #{}'s change event:\n{}"
.format(calendar.name, str(e)))
sel.wait_for_ajax()
return True
@fill.method((object, types.NoneType))
@fill.method((types.NoneType, object))
def _sd_fill_none(*args, **kwargs):
""" Ignore a NoneType """
pass
class Form(Region):
"""
A class for interacting with Form elements on pages.
The Form class takes a set of locators and binds them together to create a
unified Form object. This Form object has a defined field order so that the
user does not have to worry about which order the information is provided.
This enables the data to be provided as a dict meaning it can be passed directly
from yamls. It inherits the base Region class, meaning that locators can still be
referenced in the same way a Region's locators can. You can also add one more field which will
be a :py:class:`dict` of metadata, determining mostly field validity. See :py:meth:`field_valid`
Args:
fields: A list of field name/locator tuples. The argument not only defines
the order of the elements but also which elements comprise part of the form.
identifying_loc: A locator which should be present if the form is visible.
Usage:
provider_form = web_ui.Form(
fields=[
('type_select', "//*[@id='server_emstype']"),
('name_text', "//*[@id='name']"),
('hostname_text', "//*[@id='hostname']"),
('ipaddress_text', "//*[@id='ipaddress']"),
('amazon_region_select', "//*[@id='hostname']"),
('api_port', "//*[@id='port']"),
])
Forms can then be filled in like so.::
provider_info = {
'type_select': "OpenStack",
'name_text': "RHOS-01",
'hostname_text': "RHOS-01",
'ipaddress_text': "10.0.0.0",
'api_port': "5000",
}
web_ui.fill(provider_form, provider_info)
Note:
Using supertuples in a list, although ordered due to the properties of a List,
will not override the field order defined in the Form.
"""
pretty_attrs = ['fields']
def __init__(self, fields=None, identifying_loc=None):
self.metadata = {}
self.locators = {}
fields_seen = set()
for field in fields:
try:
if field[0] in fields_seen:
raise ValueError('You cannot have duplicate field names in a Form ({})'.format(
field[0]))
self.locators[field[0]] = field[1]
if len(field) == 3:
self.metadata[field[0]] = field[2]
fields_seen.add(field[0])
except IndexError:
raise ValueError("fields= can be 2- or 3-tuples only! (name, loc[, metadata])")
self.fields = fields
self.identifying_loc = identifying_loc
def field_valid(self, field_name):
"""Add the validity constraints here."""
if field_name not in self.metadata:
return True
metadata = self.metadata[field_name]
if "removed_since" in metadata:
removed_since = metadata["removed_since"]
return version.current_version() < removed_since
if "appeared_in" in metadata:
appeared_in = metadata["appeared_in"]
return version.current_version() >= appeared_in
return True
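# Illustrative sketch of the 3-tuple field metadata consumed by field_valid()
# (field names and version numbers are hypothetical):
#
#     form = Form(fields=[
#         ('old_field', Input('old'), {'removed_since': '5.6'}),
#         ('new_field', Input('new'), {'appeared_in': '5.5'}),
#     ])
#     # old_field is only treated as valid while current_version() < 5.6;
#     # new_field only once current_version() >= 5.5.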
def fill(self, fill_data):
fill(self, fill_data)
@fill.method((Form, Sequence))
def _fill_form_list(form, values, action=None, action_always=False):
"""Fills in field elements on forms
Takes a set of values in dict or supertuple format and locates form elements,
in the correct order, and fills them in.
Note:
Currently supports, text, textarea, select, checkbox, radio, password, a
and Table objects/elements.
Args:
values: a dict or supertuple formatted set of data where
each key is the name of the form locator from the page model. Some
objects/elements, such as :py:class:`Table` objects, support providing
multiple values to be clicked on in a single call.
action: a locator which will be clicked when the form filling is complete
action_always: if True, perform the action even if none of the
values to be filled in required any UI
interaction (eg, text boxes already had the
text to be filled in, checkbox already checked,
etc)
"""
logger.info('Beginning to fill in form...')
sel.wait_for_ajax()
values = list(val for key in form.fields for val in values if val[0] == key[0])
res = []
for field, value in values:
if value is not None and form.field_valid(field):
loc = form.locators[field]
try:
sel.wait_for_element(loc, timeout=10)
except TypeError:
# TypeError - when loc is not resolvable to an element, elements() will yell
# vvv An alternate scenario when element is not resolvable, just wait a bit.
time.sleep(1)
except TimedOutError:
logger.warning("This element [{}] couldn't be waited for".format(loc))
logger.trace(' Dispatching fill for %s', field)
fill_prev = fill(loc, value) # re-dispatch to fill for each item
res.append(fill_prev != value) # note whether anything changed
elif value is None and isinstance(form.locators[field], Select):
fill_prev = fill(form.locators[field], None)
res.append(fill_prev != value)
else:
res.append(False)
if action and (any(res) or action_always): # only perform action if something changed
logger.debug(' Invoking end of form action')
fill(action, True) # re-dispatch with truthy value
logger.debug('Finished filling in form')
return any(res) or action_always
@fill.method((object, Mapping))
def _fill_form_dict(form, values, **kwargs):
"""Fill in a dict by converting it to a list"""
return _fill_form_list(form, values.items(), **kwargs)
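# Illustrative sketch of dict-based form filling with a final action click
# (field names and the action locator are hypothetical):
#
#     fill(provider_form,
#          {'name_text': 'RHOS-01', 'hostname_text': 'rhos.example.com'},
#          action='//button[@id="save"]')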
class Input(Pretty):
"""Class designed to handle things about ``<input>`` tags that have name attr in one place.
Also applies on ``textarea``, which is basically input with multiple lines (if it has name).
Args:
*names: Possible values (or) of the ``name`` attribute.
Keywords:
use_id: Whether to use ``id`` instead of ``name``. Useful if there is some input that does
not have ``name`` attribute present.
"""
pretty_attrs = ['_names', '_use_id']
def __init__(self, *names, **kwargs):
self._names = names
self._use_id = kwargs.pop("use_id", False)
@property
def names(self):
if len(self._names) == 1 and isinstance(self._names[0], dict):
return (version.pick(self._names[0]),)
else:
return self._names
def _generate_attr(self, name):
return "@{}={}".format("id" if self._use_id else "name", quoteattr(name))
def locate(self):
# If the end of the locator is changed, modify also the choice in Radio!!!
return '//*[(self::input or self::textarea) and ({})]'.format(
" or ".join(self._generate_attr(name) for name in self.names)
)
@property
def angular_help_block(self):
"""Returns the first visible angular helper text (like 'Required')."""
loc = (
'{0}/following-sibling::span[not(contains(@class, "ng-hide"))]'
'| {0}/following-sibling::div/span[not(contains(@class, "ng-hide"))]'
.format(self.locate()))
try:
return sel.text(loc).strip()
except NoSuchElementException:
return None
def __add__(self, string):
return self.locate() + string
def __radd__(self, string):
return string + self.locate()
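# Illustrative sketch of Input (names are hypothetical): the locator matches
# an <input> or <textarea> whose name (or id) equals any of the given values.
#
#     user = Input('user_name', 'userid')    # either name attribute matches
#     fill(user, 'admin')
#     pwd = Input('auth_pwd', use_id=True)   # match on id= instead of name=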
class FileInput(Input):
"""A file input handling widget.
Accepts a string. If the string is a file, then it is put in the input. Otherwise a temporary
file is generated and that one is fed to the file input.
"""
pass
@fill.method((FileInput, Anything))
def _fill_file_input(i, a):
# TODO Upgrade selenium to 3.0.1+, this breaks in chrome at send_keys()
# https://github.com/SeleniumHQ/selenium/issues/2906
# Engage the selenium's file detector so we can reliably transfer the file to the browser
with browser().file_detector_context(LocalFileDetector):
# We need a raw element so we can send_keys to it
input_el = sel.element(i.locate())
if browser().file_detector.is_local_file(a) is None:
# Create a temp file
f = NamedTemporaryFile()
f.write(str(a))
f.flush()
input_el.send_keys(os.path.abspath(f.name))
atexit.register(f.close)
else:
# It already is a file ...
input_el.send_keys(a)
# Since we used raw selenium element, wait for ajax here ...
sel.wait_for_ajax()
class Radio(Input):
""" A class for Radio button groups
Radio allows the usage of HTML radio elements without resorting to previous
practice of iterating over elements to find the value. The name of the radio
group is passed and then when choices are required, the locator is built.
Args:
name: The HTML elements ``name`` attribute that identifies a group of radio
buttons.
Usage:
radio = Radio("schedule__schedule_type")
A specific radio element can then be returned by running the following::
el = radio.choice('immediately')
click(el)
The :py:class:`Radio` object can be reused over and over with repeated calls to
the :py:func:`Radio.choice` method.
"""
def choice(self, val):
""" Returns the locator for a choice
Args:
val: A string representing the ``value`` attribute of the specific radio
element.
Returns: A string containing the XPATH of the specific radio element.
"""
# Ugly, but working - all the conditions are in parentheses
return re.sub(r"\]$", " and @value={}]".format(quoteattr(val)), self.locate())
def observer_wait(self, val):
sel.detect_observed_field(self.choice(val))
@fill.method((Radio, object))
def _fill_radio(radio, value):
"""How to fill a radio button group (by selecting the given value)"""
logger.debug(' Filling in Radio{} with value "{}"'.format(tuple(radio.names), value))
sel.click(radio.choice(value))
radio.observer_wait(value)
class BootstrapTreeview(object):
"""A class representing the Bootstrap treeview used in newer builds.
Implements ``expand_path``, ``click_path``, ``read_contents``. All are implemented in manner
very similar to the original :py:class:`Tree`.
Args:
tree_id: Id of the tree, the closest div to the root ``ul`` element.
"""
ROOT_ITEMS = './ul/li[not(./span[contains(@class, "indent")])]'
ROOT_ITEMS_WITH_TEXT = (
'./ul/li[not(./span[contains(@class, "indent")]) and contains(normalize-space(.), {text})]')
SELECTED_ITEM = './ul/li[contains(@class, "node-selected")]'
CHILD_ITEMS = (
'./ul/li[starts-with(@data-nodeid, {id})'
' and count(./span[contains(@class, "indent")])={indent}]')
CHILD_ITEMS_TEXT = (
'./ul/li[starts-with(@data-nodeid, {id})'
' and contains(normalize-space(.), {text})'
' and count(./span[contains(@class, "indent")])={indent}]')
ITEM_BY_NODEID = './ul/li[@data-nodeid={}]'
IS_EXPANDABLE = './span[contains(@class, "expand-icon")]'
IS_EXPANDED = './span[contains(@class, "expand-icon") and contains(@class, "fa-angle-down")]'
IS_CHECKABLE = './span[contains(@class, "check-icon")]'
IS_CHECKED = './span[contains(@class, "check-icon") and contains(@class, "fa-check-square-o")]'
IS_LOADING = './span[contains(@class, "expand-icon") and contains(@class, "fa-spinner")]'
INDENT = './span[contains(@class, "indent")]'
def __init__(self, tree_id):
self.tree_id = tree_id
@classmethod
def image_getter(cls, item):
"""Look up the image that is hidden in the style tag
Returns:
The name of the image without the hash, path and extension.
"""
try:
image_node = sel.element('./span[contains(@class, "node-image")]', root=item)
except NoSuchElementException:
return None
style = sel.get_attribute(image_node, 'style')
image_href = re.search(r'url\("([^"]+)"\)', style).groups()[0]
return re.search(r'/([^/]+)-[0-9a-f]+\.png$', image_href).groups()[0]
def locate(self):
return '#{}'.format(self.tree_id)
@property
def selected_item(self):
return sel.element(self.SELECTED_ITEM, root=self)
@classmethod
def indents(cls, item):
return len(sel.elements(cls.INDENT, root=item))
@classmethod
def is_expandable(cls, item):
return bool(sel.elements(cls.IS_EXPANDABLE, root=item))
@classmethod
def is_expanded(cls, item):
return bool(sel.elements(cls.IS_EXPANDED, root=item))
@classmethod
def is_checkable(cls, item):
return bool(sel.elements(cls.IS_CHECKABLE, root=item))
@classmethod
def is_checked(cls, item):
return bool(sel.elements(cls.IS_CHECKED, root=item))
@classmethod
def is_loading(cls, item):
return bool(sel.elements(cls.IS_LOADING, root=item))
@classmethod
def is_collapsed(cls, item):
return not cls.is_expanded(item)
@classmethod
def is_selected(cls, item):
return 'node-selected' in sel.classes(item)
@classmethod
def get_nodeid(cls, item):
return sel.get_attribute(item, 'data-nodeid')
@classmethod
def get_expand_arrow(cls, item):
return sel.element(cls.IS_EXPANDABLE, root=item)
def child_items(self, item=None):
if item is not None:
nodeid = unescape(quoteattr(self.get_nodeid(item) + '.'))
node_indents = self.indents(item) + 1
return sel.elements(self.CHILD_ITEMS.format(id=nodeid, indent=node_indents), root=self)
else:
return sel.elements(self.ROOT_ITEMS, root=self)
def child_items_with_text(self, item, text):
text = unescape(quoteattr(text))
if item is not None:
nodeid = unescape(quoteattr(self.get_nodeid(item) + '.'))
node_indents = self.indents(item) + 1
return sel.elements(
self.CHILD_ITEMS_TEXT.format(id=nodeid, text=text, indent=node_indents), root=self)
else:
return sel.elements(self.ROOT_ITEMS_WITH_TEXT.format(text=text), root=self)
def get_item_by_nodeid(self, nodeid):
nodeid_q = unescape(quoteattr(nodeid))
try:
return sel.element(self.ITEM_BY_NODEID.format(nodeid_q), root=self)
except NoSuchElementException:
raise exceptions.CandidateNotFound({
'message':
'Could not find the item with nodeid {} in Bootstrap tree {}'.format(
nodeid,
self.tree_id),
'path': '',
'cause': ''})
def expand_node(self, nodeid):
"""Expands a node given its nodeid. Must be visible
Args:
nodeid: ``nodeId`` of the node
Returns:
``True`` if it was possible to expand the node, otherwise ``False``.
"""
logger.trace('Expanding node %s on tree %s', nodeid, self.tree_id)
node = self.get_item_by_nodeid(nodeid)
if not self.is_expandable(node):
return False
if self.is_collapsed(node):
arrow = self.get_expand_arrow(node)
sel.click(arrow)
time.sleep(0.1)
wait_for(
lambda: not self.is_loading(self.get_item_by_nodeid(nodeid)),
delay=0.2, num_sec=30)
wait_for(
lambda: self.is_expanded(self.get_item_by_nodeid(nodeid)),
delay=0.2, num_sec=10)
return True
def collapse_node(self, nodeid):
"""Collapses a node given its nodeid. Must be visible
Args:
nodeid: ``nodeId`` of the node
Returns:
``True`` if it was possible to collapse the node, otherwise ``False``.
"""
logger.trace('Collapsing node %s on tree %s', nodeid, self.tree_id)
node = self.get_item_by_nodeid(nodeid)
if not self.is_expandable(node):
return False
if self.is_expanded(node):
arrow = self.get_expand_arrow(node)
sel.click(arrow)
time.sleep(0.1)
wait_for(
lambda: self.is_collapsed(self.get_item_by_nodeid(nodeid)),
delay=0.2, num_sec=10)
return True
@classmethod
def _process_step(cls, step):
"""Steps can be plain strings or tuples when matching images"""
if isinstance(step, dict):
# Version pick and call again ...
return cls._process_step(version.pick(step))
if isinstance(step, tuple):
image = step[0]
step = step[1]
else:
image = None
if not isinstance(step, (basestring, re._pattern_type)):
step = str(step)
return image, step
@staticmethod
def _repr_step(image, step):
if isinstance(step, re._pattern_type):
# Make it look like r'pattern'
step_repr = 'r' + re.sub(r'^[^"\']', '', repr(step.pattern))
else:
step_repr = step
if image is None:
return step_repr
else:
return '{}[{}]'.format(step_repr, image)
@classmethod
def pretty_path(cls, path):
return '/'.join(cls._repr_step(*cls._process_step(step)) for step in path)
@classmethod
def validate_node(cls, node, matcher, image):
text = sel.text(node)
if isinstance(matcher, re._pattern_type):
match = matcher.match(text) is not None
else:
match = matcher == text
if not match:
return False
if image is not None and cls.image_getter(node) != image:
return False
return True
def expand_path(self, *path, **kwargs):
"""Expands given path and returns the leaf node.
The path items can be plain strings. In that case, exact string matching happens. Path items
can also be compiled regexps, where the ``match`` method is used to determine if the node
is the one we want. And finally, the path items can be 2-tuples, where the second item can
be the string or regular expression and the first item is the image to be matched using
:py:meth:`image_getter` method.
Args:
*path: The path (explained above)
Returns:
The leaf WebElement.
Raises:
:py:class:`exceptions.CandidateNotFound` when the node is not found in the tree.
"""
sel.wait_for_ajax()
logger.info('Expanding path %s on tree %s', self.pretty_path(path), self.tree_id)
node = None
steps_tried = []
for step in path:
steps_tried.append(step)
image, step = self._process_step(step)
if node is not None and not self.expand_node(self.get_nodeid(node)):
raise exceptions.CandidateNotFound({
'message':
'Could not find the item {} in Bootstrap tree {}'.format(
self.pretty_path(steps_tried),
self.tree_id),
'path': path,
'cause': 'Could not expand the {} node'.format(self._repr_step(image, step))})
if isinstance(step, basestring):
# To speed up the search when having a string to match, pick up items with that text
child_items = self.child_items_with_text(node, step)
else:
# Otherwise we need to go through all of them.
child_items = self.child_items(node)
for child_item in child_items:
if self.validate_node(child_item, step, image):
node = child_item
break
else:
try:
cause = 'Was not found in {}'.format(
self._repr_step(*self._process_step(steps_tried[-2])))
except IndexError:
# There is only one item, probably root?
cause = 'Could not find {}'.format(
self._repr_step(*self._process_step(steps_tried[0])))
raise exceptions.CandidateNotFound({
'message':
'Could not find the item {} in Bootstrap tree {}'.format(
self.pretty_path(steps_tried),
self.tree_id),
'path': path,
'cause': cause})
return node
def click_path(self, *path, **kwargs):
"""Expands the path and clicks the leaf node.
See :py:meth:`expand_path` for more information about the synopsis.
"""
node = self.expand_path(*path, **kwargs)
sel.click(node)
return node
def read_contents(self, nodeid=None, include_images=False, collapse_after_read=False):
if nodeid is not None:
item = self.get_item_by_nodeid(nodeid)
self.expand_node(nodeid)
else:
item = None
result = []
for child_item in self.child_items(item):
result.append(
self.read_contents(
nodeid=self.get_nodeid(child_item),
include_images=include_images,
collapse_after_read=collapse_after_read))
if collapse_after_read and nodeid is not None:
self.collapse_node(nodeid)
if include_images and item is not None:
this_item = (self.image_getter(item), sel.text(item))
elif item is not None:
this_item = sel.text(item)
else:
this_item = None
if result and this_item is not None:
return [this_item, result]
elif result:
return result
else:
return this_item
def check_uncheck_node(self, check, *path, **kwargs):
leaf = self.expand_path(*path, **kwargs)
if not self.is_checkable(leaf):
raise TypeError('Item with path {} in {} is not checkable'.format(
self.pretty_path(path), self.tree_id))
checked = self.is_checked(leaf)
if checked != check:
sel.click(sel.element(self.IS_CHECKABLE, root=leaf))
def check_node(self, *path, **kwargs):
"""Expands the passed path and checks a checkbox that is located at the node."""
return self.check_uncheck_node(True, *path, **kwargs)
def uncheck_node(self, *path, **kwargs):
"""Expands the passed path and unchecks a checkbox that is located at the node."""
return self.check_uncheck_node(False, *path, **kwargs)
def node_checked(self, *path, **kwargs):
"""Check if a checkbox is checked on the node in that path."""
leaf = self.expand_path(*path, **kwargs)
if not self.is_checkable(leaf):
return False
return self.is_checked(leaf)
def find_path_to(self, target, exact=False):
""" Method used to look up the exact path to an item we know only by its regexp or partial
description.
Expands whole tree during the execution.
Args:
target: Item searched for. Can be regexp made by
:py:func:`re.compile <python:re.compile>`,
otherwise it is taken as a string for `in` matching.
exact: Useful in string matching. If set to True, it matches the exact string.
Default is False.
Returns: :py:class:`list` with path to that item.
"""
if not isinstance(target, re._pattern_type):
if exact:
target = re.compile(r"^{}$".format(re.escape(str(target))))
else:
target = re.compile(r".*?{}.*?".format(re.escape(str(target))))
def _find_in_tree(t, p=None):
if t is None:
return
if p is None:
p = []
for item in t:
if isinstance(item, list):
if target.match(item[0]) is None:
subtree = _find_in_tree(item[1], p + [item[0]])
if subtree is not None:
return subtree
else:
return p + [item[0]]
else:
if target.match(item) is not None:
return p + [item]
else:
return
result = _find_in_tree(self.read_contents())
if result is None:
raise NameError("{} not found in tree".format(target.pattern))
else:
return result
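# Illustrative sketch of BootstrapTreeview paths (tree id and node names are
# hypothetical); steps may be strings, compiled regexps or (image, text)
# tuples:
#
#     tree = BootstrapTreeview('myco_treebox')
#     tree.click_path('All Datastores', re.compile(r'^ds-\d+$'))
#     tree.check_node('All Datastores', ('datastore', 'ds-1'))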
@fill.method((BootstrapTreeview, Sequence))
def _fill_bstree_seq(tree, values):
if not values:
return None
try:
if isinstance(values[0], types.StringTypes):
tree.click_path(*values)
elif isinstance(values[0], Iterable):
for check in values:
tree.check_uncheck_node(check[1], *check[0])
except IndexError:
tree.click_path(*values)
class Tree(Pretty):
""" A class directed at CFME Tree elements
The Tree class aims to deal with all kinds of CFME trees
Args:
locator: This is a locator object pointing to the ``<ul>`` element which contains the rest
of the table.
Returns: A :py:class:`Tree` object.
A Tree object is set up by using a locator which contains the node elements. This element
will usually be a ``<ul>`` in the case of a Dynatree.
Usage:
tree = web_ui.Tree((By.XPATH, '//table//tr[@title="Datastore"]/../..'))
The path can then be navigated to return the last object in the path list, like so::
tree.click_path('Automation', 'VM Lifecycle Management (VMLifecycle)',
'VM Migrate (Migrate)')
Each path element will be expanded along the way, but will not be clicked.
When used in a :py:class:`Form`, a list of path tuples is expected in the form fill data.
The paths will be passed individually to :py:meth:`Tree.check_node`::
form = Form(fields=[
('tree_field', List(locator)),
])
form_fill_data = {
'tree_field': [
('Tree Node', 'Value'),
('Tree Node', 'Branch Node', 'Value'),
]
}
Note: Dynatrees rely on a ``<ul><li>`` setup. We class a ``<li>`` as a node.
"""
pretty_attrs = ['locator']
def __init__(self, locator):
self.locator = locator
@cached_property
def tree_id(self):
if isinstance(self.locator, basestring) and re.match(r"^[a-zA-Z0-9_-]+$", self.locator):
return self.locator
else:
el = sel.element(self.locator)
tag = sel.tag(el)
tree_id = None
if tag == "ul":
try:
parent = sel.element("..", root=el)
id_attr = sel.get_attribute(parent, "id")
if id_attr:
tree_id = id_attr
except sel.NoSuchElementException:
pass
elif tag == "div":
tree_id = sel.get_attribute(el, "id") or None
else:
raise ValueError("Unknown element ({}) passed to the Tree!".format(tag))
if tree_id is None:
raise ValueError("Could not retrieve the id for Tree {}".format(repr(self.locator)))
else:
return tree_id
def locate(self):
return "#{}".format(self.tree_id)
def root_el(self):
return sel.element(self)
def _get_tag(self):
if getattr(self, 'tag', None) is None:
self.tag = sel.tag(self)
return self.tag
def read_contents(self, by_id=False):
result = False
while result is False:
sel.wait_for_ajax()
result = sel.execute_script(
"{} return read_tree(arguments[0], arguments[1]);".format(js.read_tree),
self.locate(),
by_id)
return result
def expand_path(self, *path, **kwargs):
""" Exposes a path.
Args:
*path: The path as multiple positional string arguments denoting the course to take.
Keywords:
by_id: Whether to match ids instead of text.
Returns: The leaf web element.
"""
by_id = kwargs.pop("by_id", False)
result = False
# Ensure we pass str to the javascript. This handles objects that represent themselves
# using __str__ and generally, you should only pass str because that is what makes sense
path = castmap(str, path)
# We sometimes have to wait for ajax. In that case, JS function returns false
# Then we repeat and wait. It does not seem completely possible to wait for the data in JS
# as it runs on one thread it appears. So this way it will try to drill multiple times
# each time deeper and deeper :)
while result is False:
sel.wait_for_ajax()
try:
result = sel.execute_script(
"{} return find_leaf(arguments[0],arguments[1],arguments[2]);".format(
js.find_leaf),
self.locate(),
path,
by_id)
except sel.WebDriverException as e:
text = str(e)
match = re.search(r"TREEITEM /(.*?)/ NOT FOUND IN THE TREE", text)
if match is not None:
item = match.groups()[0]
raise exceptions.CandidateNotFound(
{'message': "{}: could not be found in the tree.".format(item),
'path': path,
'cause': e})
match = re.search(r"^CANNOT FIND TREE /(.*?)/$", text)
if match is not None:
tree_id = match.groups()[0]
raise exceptions.TreeNotFound(
"Tree {} / {} not found.".format(tree_id, self.locator))
# Otherwise ...
raise
return result
def click_path(self, *path, **kwargs):
""" Exposes a path and then clicks it.
Args:
*path: The path as multiple positional string arguments denoting the course to take.
Keywords:
by_id: Whether to match ids instead of text.
Returns: The leaf web element.
"""
# Ensure we pass str to the javascript. This handles objects that represent themselves
# using __str__ and generally, you should only pass str because that is what makes sense
path = castmap(str, path)
leaf = self.expand_path(*path, **kwargs)
logger.info("Path %r yielded menuitem %r", path, sel.text(leaf))
if leaf is not None:
sel.wait_for_ajax()
sel.click(leaf)
return leaf
@classmethod
def browse(cls, tree, *path):
"""Browse through tree via path.
If a node is not found, an exception is raised.
If the browsing reaches a leaf (a plain string), returns True if it was also
the last step of the path, otherwise False.
If the result of the path is a subtree, that subtree is returned.
Args:
tree: List with tree.
*path: Path to browse.
"""
# Ensure we pass str to the javascript. This handles objects that represent themselves
# using __str__ and generally, you should only pass str because that is what makes sense
path = castmap(str, path)
current = tree
for i, step in enumerate(path, start=1):
for node in current:
if isinstance(node, list):
if node[0] == step:
current = node[1]
break
else:
if node == step:
return i == len(path)
else:
raise Exception("Could not find node {}".format(step))
return current
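# Illustrative sketch of Tree.browse over an already-read structure (the
# nested-list shape mirrors read_contents() output):
#
#     contents = ['leaf-a', ['branch', ['leaf-b', 'leaf-c']]]
#     Tree.browse(contents, 'branch', 'leaf-b')  # True: leaf on the last step
#     Tree.browse(contents, 'branch')            # returns ['leaf-b', 'leaf-c']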
@classmethod
def flatten_level(cls, tree):
"""Extracts just node names from current tree (top).
It makes:
.. code-block:: python
["asd", "fgh", ("ijk", [...]), ("lmn", [...])]
to
.. code-block:: python
["asd", "fgh", "ijk", "lmn"]
Useful for checking of contents of current tree level
"""
return map(lambda item: item[0] if isinstance(item, list) else item, tree)
def find_path_to(self, target, exact=False):
""" Method used to look up the exact path to an item we know only by its regexp or partial
description.
Expands whole tree during the execution.
Args:
target: Item searched for. Can be regexp made by
:py:func:`re.compile <python:re.compile>`,
otherwise it is taken as a string for `in` matching.
exact: Useful in string matching. If set to True, it matches the exact string.
Default is False.
Returns: :py:class:`list` with path to that item.
"""
if not isinstance(target, re._pattern_type):
if exact:
target = re.compile(r"^{}$".format(re.escape(str(target))))
else:
target = re.compile(r".*?{}.*?".format(re.escape(str(target))))
def _find_in_tree(t, p=None):
if p is None:
p = []
for item in t:
if isinstance(item, list):
if target.match(item[0]) is None:
subtree = _find_in_tree(item[1], p + [item[0]])
if subtree is not None:
return subtree
else:
return p + [item[0]]
else:
if target.match(item) is not None:
return p + [item]
else:
return None
result = _find_in_tree(self.read_contents())
if result is None:
raise NameError("{} not found in tree".format(target.pattern))
else:
return result
class CheckboxTree(Tree):
"""Tree that has a checkbox on each node, adds methods to check/uncheck them"""
node_checkbox = "../span[@class='dynatree-checkbox']"
def _is_checked(self, leaf):
return 'dynatree-selected' in \
sel.get_attribute(sel.element("..", root=leaf), 'class')
def _check_uncheck_node(self, path, check=False):
""" Checks or unchecks a node.
Args:
*path: The path as multiple positional string arguments denoting the course to take.
check: If ``True``, the node is checked, ``False`` the node is unchecked.
"""
leaf = self.expand_path(*path)
cb = sel.element(self.node_checkbox, root=leaf)
if check is not self._is_checked(leaf):
sel.click(cb)
def check_node(self, *path):
""" Convenience function to check a node
Args:
*path: The path as multiple positional string arguments denoting the course to take.
"""
self._check_uncheck_node(path, check=True)
def uncheck_node(self, *path):
""" Convenience function to uncheck a node
Args:
*path: The path as multiple positional string arguments denoting the course to take.
"""
self._check_uncheck_node(path, check=False)
@fill.method((Tree, Sequence))
def _fill_tree_seq(tree, values):
tree.click_path(*values)
@sel.select.method((CheckboxTree, Sequence))
@fill.method((CheckboxTree, Sequence))
def _select_chkboxtree_seq(cbtree, values):
"""values should be a list of tuple pairs, where the first item is the
path to select, and the second is whether to check or uncheck.
Usage:
select(cbtree, [(['Foo', 'Bar'], False),
(['Baz'], True)])
"""
for (path, to_select) in values:
if to_select:
cbtree.check_node(*path)
else:
cbtree.uncheck_node(*path)
class InfoBlock(Pretty):
DETAIL = "detail"
FORM = "form"
PF = "patternfly"
_TITLE_CACHE = {}
pretty_attrs = ["title"]
def __new__(cls, title, detail=None):
# Caching
if title not in cls._TITLE_CACHE:
cls._TITLE_CACHE[title] = super(InfoBlock, cls).__new__(cls)
cls._TITLE_CACHE[title].__init__(title)
instance = cls._TITLE_CACHE[title]
if detail is None:
return instance
else:
return instance.member(detail)
def __init__(self, title):
if all(map(lambda a: hasattr(self, a), ["title", "_type", "_member_cache"])):
return
self.title = title
self._type = None
self._member_cache = {}
@property
def type(self):
if self._type is None:
self.root # To retrieve it
return self._type
@property
def root(self):
possible_locators = [
# Detail type
'//table//th[contains(normalize-space(.), "{}")]/../../../..'.format(
self.title),
# Form type
(
'//*[p[@class="legend"][contains(normalize-space(.), "{}")] and table/tbody/tr/td['
'contains(@class, "key")]]'.format(self.title)
),
# Newer Form type (master.20150311020845_547fd06 onwards)
(
'//*[h3[contains(normalize-space(.), "{}")] and table/tbody/tr/td['
'contains(@class, "key")]]'.format(self.title)
),
# Newer Form type used in AC tagging:
(
'//h3[contains(normalize-space(.), "{}")]/following-sibling::div/table/tbody/tr/td['
'contains(@class, "key")]/../../../..'.format(self.title)
),
# The root element must contain table element because listaccordions were caught by the
# locator. It used to be fieldset but it seems it can be really anything
# And here comes a new one, this time no table. (eg. 5.5.0.7 Configuration/About)
(
'//*[h3[contains(normalize-space(.), "{}")] and '
'div[contains(@class, "form-horizontal")]/div/label]'.format(self.title)
)
]
found = sel.elements("|".join(possible_locators))
if not found:
raise exceptions.BlockTypeUnknown("The block type requested is unknown")
root_el = found[0]
if sel.elements("./table/tbody/tr/td[contains(@class, 'key')]", root=root_el):
self._type = self.FORM
elif sel.elements("./div[contains(@class, 'form-horizontal')]/div/label", root=root_el):
self._type = self.PF
else:
self._type = self.DETAIL
return root_el
def member(self, name):
if name not in self._member_cache:
self._member_cache[name] = self.Member(self, name)
return self._member_cache[name]
def by_member_icon(self, icon):
"""In case you want to find the item by icon in the value field (like OS infra diff.)"""
if self._type == self.PF:
raise NotImplementedError(
"I haven't implemented icons+patternfly infoblock yet, so fix me if you see this.")
l = ".//table/tbody/tr/td[2]/img[contains(@src, {})]/../../td[1]".format(quoteattr(icon))
return self.member(sel.text(l))
def __call__(self, member):
"""A present for @smyers"""
return self.member(member)
##
#
# Shortcuts for old-style access
#
@classmethod
def text(cls, *args, **kwargs):
return cls(*args, **kwargs).text
@classmethod
def element(cls, *args, **kwargs):
return cls(*args, **kwargs).element
@classmethod
def elements(cls, *args, **kwargs):
return cls(*args, **kwargs).elements
@classmethod
def icon_href(cls, *args, **kwargs):
return cls(*args, **kwargs).icon_href
@classmethod
def container(cls, *args, **kwargs):
try:
return sel.element(cls(*args, **kwargs).container)
except sel_exceptions.NoSuchElementException:
raise exceptions.ElementOrBlockNotFound(
"Either the element of the block could not be found")
class Member(Pretty):
pretty_attrs = "name", "ib"
def __init__(self, ib, name):
self.ib = ib
self.name = name
@property
def pair_locator(self):
if self.ib.type == InfoBlock.DETAIL:
return './/table/tbody/tr/td[1][@class="label"][normalize-space(.)="{}"]/..'.format(
self.name)
elif self.ib.type == InfoBlock.FORM:
return './/table/tbody/tr/td[1][@class="key"][normalize-space(.)="{}"]/..'.format(
self.name)
elif self.ib.type == InfoBlock.PF:
return (
'./div[contains(@class, "form-horizontal")]'
'/div[label[normalize-space(.)="{}"]]/div'.format(self.name))
@property
def pair(self):
return sel.element(self.pair_locator, root=self.ib.root)
@property
def container(self):
if self.ib.type == InfoBlock.PF:
# Because we get the element directly, not the two tds
return self.pair
else:
return sel.element("./td[2]", root=self.pair)
def locate(self):
return self.container
@property
def elements(self):
return sel.elements("./*", root=self.container)
@property
def element(self):
return self.elements[0]
@property
def text(self):
return sel.text(self.container).encode("utf-8").strip()
@property
def icon_href(self):
try:
return sel.get_attribute(sel.element("./img", root=self.container), "src")
except sel_exceptions.NoSuchElementException:
return None
@property
def title(self):
return sel.get_attribute(self.pair, "title") or None
@fill.method((InfoBlock, Sequence))
def _ib_seq(ib, i):
for item in i:
sel.click(ib.member(item))
@fill.method((InfoBlock, basestring))
def _ib_str(ib, s):
fill([s])
@fill.method((InfoBlock.Member, bool))
def _ib_m_seq(member, b):
if b:
sel.click(member)
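# Illustrative sketch of InfoBlock access (block title and member names are
# hypothetical):
#
#     power_state = InfoBlock.text('Power Management', 'Power State')
#     relationships = InfoBlock('Relationships')
#     sel.click(relationships.member('Datastores'))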
class Quadicon(Pretty):
"""
Represents a single quadruple icon in the CFME UI.
A Quadicon contains multiple quadrants. These are accessed via attributes.
The qtype is currently one of the following and determines which attribute names
are present. They are mapped internally and can be reassigned easily if the UI changes.
A Quadicon is used by defining the name of the icon and the type. After that, it can be used
to obtain the locator of the Quadicon, or query its quadrants, via attributes.
Args:
name: The label of the icon.
qtype: The type of the quad icon. By default it is ``None``, therefore plain quad without any
retrievable data usable for selecting/clicking.
Usage:
qi = web_ui.Quadicon('hostname.local', 'host')
qi.creds
click(qi)
.. rubric:: Known Quadicon Types and Attributes
* **host** - *from the infra/host page* - has quads:
* a. **no_vm** - Number of VMs
* b. **state** - The current state of the host
* c. **vendor** - The vendor of the host
* d. **creds** - If the creds are valid
* **infra_prov** - *from the infra/providers page* - has quads:
* a. **no_host** - Number of hosts
* b. *Blank*
* c. **vendor** - The vendor of the provider
* d. **creds** - If the creds are valid
* **vm** - *from the infra/virtual_machines page* - has quads:
* a. **os** - The OS of the vm
* b. **state** - The current state of the vm
* c. **vendor** - The vendor of the vm's host
* d. **no_snapshot** - The number of snapshots
* g. **policy** - The state of the policy
* **cloud_prov** - *from the cloud/providers page* - has quads:
* a. **no_instance** - Number of instances
* b. **no_image** - Number of machine images
* c. **vendor** - The vendor of the provider
* d. **creds** - If the creds are valid
* **instance** - *from the cloud/instances page* - has quads:
* a. **os** - The OS of the instance
* b. **state** - The current state of the instance
* c. **vendor** - The vendor of the instance's host
* d. **no_snapshot** - The number of snapshots
* g. **policy** - The state of the policy
* **datastore** - *from the infra/datastores page* - has quads:
* a. **type** - File system type
* b. **no_vm** - Number of VMs
* c. **no_host** - Number of hosts
* d. **avail_space** - Available space
* **cluster** - *from the infra/cluster page* - has no quads
* **resource_pool** - *from the infra/resource_pool page* - has no quads
* **stack** - *from the clouds/stacks page* - has no quads
Returns: A :py:class:`Quadicon` object.
"""
pretty_attrs = ['_name', '_qtype']
QUADS = {
"host": {
"no_vm": ("a", 'txt'),
"state": ("b", 'img'),
"vendor": ("c", 'img'),
"creds": ("d", 'img'),
},
"infra_prov": {
"no_host": ("a", 'txt'),
"vendor": ("c", 'img'),
"creds": ("d", 'img'),
},
"vm": {
"os": ("a", 'img'),
"state": ("b", 'img'),
"vendor": ("c", 'img'),
"no_snapshot": ("d", 'txt'),
"policy": ("g", 'img'),
},
"cloud_prov": {
"no_vm": ("a", 'txt'),
"no_image": ("b", 'txt'),
"vendor": ("b", 'img'),
"creds": ("d", 'img'),
},
"instance": {
"os": ("a", 'img'),
"state": ("b", 'img'),
"vendor": ("c", 'img'),
"no_snapshot": ("d", 'txt'),
"policy": ("g", 'img'),
},
"stack": {},
"datastore": {
"type": ("a", 'img'),
"no_vm": ("b", 'txt'),
"no_host": ("c", 'txt'),
"avail_space": ("d", 'img'),
},
"cluster": {},
"resource_pool": {},
"template": {
"os": ("a", 'img'),
"state": ("b", 'img'),
"vendor": ("c", 'img'),
"no_snapshot": ("d", 'txt'),
},
"image": {
"os": ("a", 'img'),
"state": ("b", 'img'),
"vendor": ("c", 'img'),
"no_snapshot": ("d", 'txt'),
},
"middleware": {}, # Middleware quads have no fields
"object_store": {},
None: {}, # If you just want to find the quad and not mess with data
}
def __init__(self, name, qtype=None):
self._name = name
self.qtype = qtype
def __repr__(self):
return '{}({!r}, {!r})'.format(type(self).__name__, self._name, self.qtype)
@property
def qtype(self):
return self._qtype
@qtype.setter
def qtype(self, value):
assert value in self.QUADS
self._qtype = value
@property
def _quad_data(self):
return self.QUADS[self.qtype]
def checkbox(self):
""" Returns: a locator for the internal checkbox for the quadicon"""
return "//input[@type='checkbox' and ../../..//a[{}]]".format(self.a_cond)
@property
def exists(self):
try:
self.locate()
return True
except sel.NoSuchElementException:
return False
@property
def a_cond(self):
if self.qtype == "middleware":
return "contains(normalize-space(@title), {name})"\
.format(name=quoteattr('Name: {}'.format(self._name)))
else:
return "@title={name} or @data-original-title={name}".format(name=quoteattr(self._name))
def locate(self):
""" Returns: a locator for the quadicon anchor"""
try:
return sel.move_to_element(
'div/a',
root="//div[contains(@id, 'quadicon') and ../../..//a[{}]]".format(self.a_cond))
except sel.NoSuchElementException:
quads = sel.elements("//div[contains(@id, 'quadicon')]/../../../tr/td/a")
if not quads:
raise sel.NoSuchElementException("Quadicon {} not found. No quads present".format(
self._name))
else:
quad_names = [self._get_title(quad) for quad in quads]
raise sel.NoSuchElementException(
"Quadicon {} not found. These quads are present:\n{}".format(
self._name, ", ".join(quad_names)))
def _locate_quadrant(self, corner):
""" Returns: a locator for the specific quadrant"""
return "//div[contains(@class, {}) and ../../../..//a[{}]]".format(
quoteattr("{}72".format(corner)), self.a_cond)
def __getattr__(self, name):
""" Queries the quadrants by name
Args:
name: The name of the quadrant identifier, as defined above.
Returns: A string containing a representation of what is in the quadrant.
"""
if name in self._quad_data:
corner, rtype = self._quad_data[name]
locator = self._locate_quadrant(corner)
# We have to have a try/except here as some quadrants
# do not exist if they have no data, e.g. current_state in a host
# with no credentials.
try:
el = sel.element(locator)
except sel_exceptions.NoSuchElementException:
return None
if rtype == 'txt':
return el.text
if rtype == 'img':
try:
img_el = sel.element(
'.//img|.//div[contains(@style, "background-image")]',
root=el)
except sel_exceptions.NoSuchElementException:
raise NoSuchElementException(
('Could not find the image field in quadrant {} of {!r}. '
'This may be an error or a UI change.').format(corner, self))
tag = sel.tag(img_el)
if tag == 'img':
img_name = sel.get_attribute(img_el, 'src')
elif tag == 'div':
style = sel.get_attribute(img_el, 'style')
match = re.search(r'background-image:\s*url\("([^"]+)"\)', style)
if not match:
raise ValueError(
'Could not find the image url in style {!r} of {!r} quadrant {}'.format(
style, self, corner))
img_name = match.groups()[0]
else:
raise ValueError(
'Unknown tag <{}> when parsing quadicon {!r}, quadrant {}'.format(
tag, self, corner))
path, filename = os.path.split(img_name)
root, ext = os.path.splitext(filename)
return root
else:
return object.__getattribute__(self, name)
def __str__(self):
return self.locate()
@classmethod
def _get_title(cls, el):
title = sel.get_attribute(el, "title")
if title is not None:
return title
else:
return sel.get_attribute(el, "data-original-title")
@classmethod
def all(cls, qtype=None, this_page=False):
"""Allows iteration over Quadicons.
Args:
qtype: Quadicon type. Refer to the constructor for reference.
this_page: Whether to look for Quadicons only on current page (do not list pages).
Returns: :py:class:`list` of :py:class:`Quadicon`
"""
from cfme.web_ui import paginator # Prevent circular imports
if this_page:
pages = (None, ) # Single, current page. Since we don't care about the value, using None
else:
pages = paginator.pages()
for page in pages:
for href in sel.elements("//div[contains(@id, 'quadicon')]/../../../tr/td/a"):
yield cls(cls._get_title(href), qtype)
@classmethod
def first(cls, qtype=None):
return cls(cls.get_first_quad_title(), qtype=qtype)
@staticmethod
def select_first_quad():
fill("//div[contains(@id, 'quadicon')]/../..//input", True)
@staticmethod
def get_first_quad_title():
first_quad = "//div[contains(@id, 'quadicon')]/../../../tr/td/a"
title = sel.get_attribute(first_quad, "title")
if title:
return title
else:
return sel.get_attribute(first_quad, "data-original-title") or "" # To ensure str
@classmethod
def any_present(cls):
try:
cls.get_first_quad_title()
except NoSuchElementException:
return False
except AttributeError:
# This is needed so that if there is no browser, we fail nicely, this in turn is
# needed to make the docs not error.
return False
else:
return True
@property
def name(self):
""" Returns name of the quadicon."""
return self._name
@property
def check_for_single_quadrant_icon(self):
""" Checks if the quad icon is a single quadrant icon."""
for quadrant_name in self._quad_data.iterkeys():
            # These quadrants will be displayed if it is a regular quad
quadrant_id = self._quad_data[quadrant_name][0] # It is a tuple
if sel.is_displayed(self._locate_quadrant(quadrant_id)):
return False
return sel.is_displayed(self._locate_quadrant("e")) # Image has only 'e'
@property
def href(self):
return self.locate().get_attribute('href')
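# Illustrative Quadicon usage (not part of the original module; the quadicon
# title 'test-vm', the type 'vm' and the quadrant attribute read below are all
# assumed examples):
#
#     quad = Quadicon('test-vm', 'vm')
#     if Quadicon.any_present():
#         sel.click(quad)       # navigate to the entity
#         state = quad.os       # read a quadrant defined in _quad_data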
class DHTMLSelect(Select):
"""
A special Select object for CFME's icon enhanced DHTMLx Select elements.
Args:
loc: A locator.
Returns a :py:class:`cfme.web_ui.DHTMLSelect` object.
"""
@staticmethod
def _log(meth, val=None):
if val:
val_string = " with value {}".format(val)
logger.debug('Filling in DHTMLSelect using (%s)%s', meth, val_string)
def _get_select_name(self):
""" Get's the name reference of the element from its hidden attribute.
"""
root_el = sel.element(self)
el = sel.element("div/input[2]", root=root_el)
name = sel.get_attribute(el, 'name')
return name
@property
def all_selected_options(self):
""" Returns all selected options.
Note: Since the DHTML select can only have one option selected at a time, we
            simply return the first element (the only element).
Returns: A Web element.
"""
return [self.first_selected_option]
@property
def first_selected_option(self):
""" Returns the first selected option in the DHTML select
Note: In a DHTML select, there is only one option selectable at a time.
Returns: A webelement.
"""
name = self._get_select_name()
return browser().execute_script(
            'return {}.getOptionByIndex({}.getSelectedIndex()).content'.format(name, name))
@property
def options(self):
""" Returns a list of options of the select as webelements.
Returns: A list of Webelements.
"""
name = self._get_select_name()
return browser().execute_script('return {}.DOMlist.children'.format(name))
def select_by_index(self, index, _cascade=None):
""" Selects an option by index.
Args:
index: The select element's option by index.
"""
name = self._get_select_name()
if index is not None:
if not _cascade:
self._log('index', index)
browser().execute_script('{}.selectOption({})'.format(name, index))
def select_by_visible_text(self, text):
""" Selects an option by visible text.
Args:
text: The select element option's visible text.
"""
name = self._get_select_name()
if text is not None:
self._log('visible_text', text)
value = browser().execute_script(
'return {}.getOptionByLabel("{}").value'.format(name, text))
self.select_by_value(value, _cascade=True)
def select_by_value(self, value, _cascade=None):
""" Selects an option by value.
Args:
value: The select element's option value.
"""
name = self._get_select_name()
if value is not None:
if not _cascade:
self._log('value', value)
index = browser().execute_script('return {}.getIndexByValue("{}")'.format(name, value))
self.select_by_index(index, _cascade=True)
def locate(self):
return sel.move_to_element(self._loc)
@sel.select.method((DHTMLSelect, basestring))
def select_dhtml(dhtml, s):
dhtml.select_by_visible_text(s)
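# Illustrative DHTMLSelect usage (the locator and option label are assumed):
#
#     dhtml = DHTMLSelect("//div[@id='some_dhtml_select']")
#     sel.select(dhtml, 'Some Option')   # dispatches to select_dhtml above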
class Filter(Form):
""" Filters requests pages
This class inherits Form as its base and adds a few methods to assist in filtering
request pages.
Usage:
f = Filter(fields=[
('type', Select('//select[@id="type_choice"]')),
('approved', Input("state_choice__approved")),
('denied', Input"state_choice__denied")),
('pending_approval', Input("state_choice__pending_approval")),
('date', Select('//select[@id="time_period"]')),
('reason', Input("reason_text")),
])
f.apply_filter(type="VM Clone", approved=False,
pending_approval=False, date="Last 24 Hours", reason="Just Because")
"""
buttons = {
'default_off': '//div[@id="buttons_off"]/li/a/img[@alt="Set filters to default"]',
'default_on': '//div[@id="buttons_on"]/li/a/img[@alt="Set filters to default"]',
'apply': '//div[@id="buttons_on"]//a[@title="Apply the selected filters"]',
'reset': '//div[@id="buttons_on"]//a[@title="Reset filter changes"]'
}
def default_filter(self):
""" Method to reset the filter back to defaults.
"""
sel.click(self.buttons['default_off'])
sel.click(self.buttons['default_on'])
def reset_filter(self):
""" Method to reset the changes to the filter since last applying.
"""
sel.click(self.buttons['reset'])
def apply_filter(self, **kwargs):
""" Method to apply a filter.
First resets the filter to default and then applies the filter.
Args:
**kwargs: A dictionary of form elements to fill and their values.
"""
self.default_filter()
self.fill(kwargs)
sel.click(self.buttons['apply'])
class MultiSelect(Region):
"""Represents a UI widget where there are two select boxes, one with
possible selections, and another with selected items. Has two
arrow buttons to move items between the two"""
def __init__(self,
available_select=None,
selected_select=None,
select_arrow=None,
deselect_arrow=None):
self.available_select = available_select
self.selected_select = selected_select
self.select_arrow = select_arrow
self.deselect_arrow = deselect_arrow
@sel.select.method((MultiSelect, Sequence))
def select_multiselect(ms, values):
sel.select(ms.available_select, values)
sel.click(ms.select_arrow)
@fill.method((MultiSelect, Sequence))
def fill_multiselect(ms, items):
sel.select(ms, items)
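# Illustrative MultiSelect usage (all locators are assumed examples):
#
#     ms = MultiSelect(
#         available_select=Select("//select[@id='choices_chosen']"),
#         selected_select=Select("//select[@id='members_chosen']"),
#         select_arrow="//a[@title='Move selected']",
#         deselect_arrow="//a[@title='Remove selected']")
#     fill(ms, ['item one', 'item two'])  # moves both into the selected box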
class UpDownSelect(Region):
"""Multiselect with two arrows (up/down) next to it. Eg. in AE/Domain priority selection.
Args:
select_loc: Locator for the select box (without Select element wrapping)
up_loc: Locator of the Move Up arrow.
down_loc: Locator with Move Down arrow.
"""
def __init__(self, select_loc, up_loc, down_loc):
super(UpDownSelect, self).__init__(locators=dict(
select=Select(select_loc, multi=True),
up=up_loc,
down=down_loc,
))
def get_items(self):
return map(lambda el: el.text.encode("utf-8"), self.select.options)
def move_up(self, item):
item = str(item)
assert item in self.get_items()
self.select.deselect_all()
sel.select(self.select, item)
sel.click(self.up)
def move_down(self, item):
item = str(item)
assert item in self.get_items()
self.select.deselect_all()
sel.select(self.select, item)
sel.click(self.down)
def move_top(self, item):
item = str(item)
assert item in self.get_items()
self.select.deselect_all()
while item != self.get_items()[0]:
sel.select(self.select, item)
sel.click(self.up)
def move_bottom(self, item):
item = str(item)
assert item in self.get_items()
self.select.deselect_all()
while item != self.get_items()[-1]:
sel.select(self.select, item)
sel.click(self.down)
@fill.method((UpDownSelect, Sequence))
def _fill_uds_seq(uds, seq):
seq = map(str, seq)
for item in reversed(seq): # reversed because every new item at top pushes others down
uds.move_top(item)
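# Illustrative UpDownSelect fill (locators are assumed). Filling with a
# sequence reorders the selected items so they end up in the given order,
# top first:
#
#     uds = UpDownSelect(
#         "//select[@id='seq_fields']",
#         "//img[@alt='Move selected fields up']",
#         "//img[@alt='Move selected fields down']")
#     fill(uds, ['first', 'second', 'third'])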
class ScriptBox(Pretty):
"""Represents a script box as is present on the customization templates pages.
This box has to be activated before keys can be sent. Since this can't be done
until the box element is visible, and some dropdowns change the element, it must
be activated "inline".
Args:
"""
pretty_attrs = ['locator']
def __init__(self, name=None, ta_locator="//textarea[contains(@id, 'method_data')]"):
self._name = name
self.ta_loc = ta_locator
@property
def name(self):
if not self._name:
self._name = version.pick({
version.LOWEST: 'miqEditor',
'5.5': 'ManageIQ.editor'})
return self._name
def get_value(self):
script = sel.execute_script('return {}.getValue();'.format(self.name))
script = script.replace('\\"', '"').replace("\\n", "\n")
return script
def workaround_save_issue(self):
# We need to fire off the handlers manually in some cases ...
sel.execute_script(
"{}._handlers.change.map(function(handler) {{ handler() }});".format(self.name))
sel.wait_for_ajax()
@fill.method((ScriptBox, Anything))
def fill_scriptbox(sb, script):
"""This function now clears and sets the ScriptBox.
"""
logger.info("Filling ScriptBox {} with\n{}".format(sb.name, script))
sel.execute_script('{}.setValue(arguments[0]);'.format(sb.name), script)
sel.wait_for_ajax()
sel.execute_script('{}.save();'.format(sb.name))
sel.wait_for_ajax()
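# Illustrative ScriptBox usage (the script content is an assumed example):
#
#     sb = ScriptBox()
#     fill(sb, "puts 'hello'")   # clears, sets and saves the editor content
#     current = sb.get_value()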
class CheckboxSelect(Pretty):
"""Class used for filling those bunches of checkboxes I (@mfalesni) always hated to search for.
Can fill by values, text or both. To search the text for the checkbox, you have 2 choices:
* If the text can be got from parent's tag (like `<div><input type="checkbox">blablabla</div>`
where blablabla is the checkbox's description looked up), you can leave the
`text_access_func` unfilled.
* If there is more complicated layout and you don't mind a bit slower operation, you can pass
the text_access_func, which should be like `lambda checkbox_el: get_text_of(checkbox_el)`.
The checkbox `WebElement` is passed to it and the description text is the expected output
of the function.
Args:
search_root: Root element for checkbox search
text_access_func: Function returning descriptive text about passed CB element.
"""
pretty_attrs = ['_root']
def __init__(self, search_root, text_access_func=None):
self._root = search_root
self._access_func = text_access_func
@property
def checkboxes(self):
"""All checkboxes."""
return set(sel.elements(".//input[@type='checkbox']", root=sel.element(self._root)))
@property
def selected_checkboxes(self):
"""Only selected checkboxes."""
return {cb for cb in self.checkboxes if cb.is_selected()}
@property
def selected_values(self):
"""Only selected checkboxes' values."""
return {sel.get_attribute(cb, "value") for cb in self.selected_checkboxes}
@property
def unselected_checkboxes(self):
"""Only unselected checkboxes."""
return {cb for cb in self.checkboxes if not cb.is_selected()}
@property
def unselected_values(self):
"""Only unselected checkboxes' values."""
return {sel.get_attribute(cb, "value") for cb in self.unselected_checkboxes}
def checkbox_by_id(self, id):
"""Find checkbox's WebElement by id."""
return sel.element(
".//input[@type='checkbox' and @id='{}']".format(id), root=sel.element(self._root)
)
def select_all(self):
"""Selects all checkboxes."""
for cb in self.unselected_checkboxes:
sel.check(cb)
def unselect_all(self):
"""Unselects all checkboxes."""
for cb in self.selected_checkboxes:
sel.uncheck(cb)
def checkbox_by_text(self, text):
"""Returns checkbox's WebElement by searched by its text."""
if self._access_func is not None:
for cb in self.checkboxes:
txt = self._access_func(cb)
if txt == text:
return cb
else:
raise NameError("Checkbox with text {} not found!".format(text))
else:
# Has to be only single
return sel.element(
".//*[contains(., '{}')]/input[@type='checkbox']".format(text),
root=sel.element(self._root)
)
def check(self, values):
"""Checking function.
Args:
values: Dictionary with key=CB name, value=bool with status.
Look in the function to see.
"""
for name, value in values.iteritems():
if isinstance(name, sel.ByText):
sel.checkbox(self.checkbox_by_text(str(name)), value)
else:
sel.checkbox(self.checkbox_by_id(name), value)
@fill.method((CheckboxSelect, bool))
def fill_cb_select_bool(select, all_state):
if all_state is True:
return select.select_all()
else:
return select.unselect_all()
@fill.method((CheckboxSelect, list))
@fill.method((CheckboxSelect, set))
def fill_cb_select_set(select, names):
return select.check({k: True for k in names})
@fill.method((CheckboxSelect, Mapping))
def fill_cb_select_dictlist(select, dictlist):
return select.check(dictlist)
@fill.method((CheckboxSelect, basestring))
@fill.method((CheckboxSelect, sel.ByText))
def fill_cb_select_string(select, cb):
return fill(select, {cb})
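# Illustrative CheckboxSelect usage (the root locator, checkbox ids and the
# visible text are assumed examples):
#
#     cbs = CheckboxSelect("//div[@id='form_filters']")
#     fill(cbs, True)                           # select all checkboxes
#     fill(cbs, {"cb_1", "cb_2"})               # select by id
#     cbs.check({sel.ByText("Enabled"): True})  # select by visible text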
class ShowingInputs(Pretty):
"""This class abstracts out as a container of inputs, that appear after preceeding was filled.
Args:
*locators: In-order-of-display specification of locators.
Keywords:
min_values: How many values are required (Default: 0)
"""
pretty_attrs = ['locators', 'min_values']
def __init__(self, *locators, **kwargs):
self._locators = locators
self._min = kwargs.get("min_values", 0)
def zip(self, with_values):
if len(with_values) < self._min:
raise ValueError("Not enough values provided ({}, expected {})".format(
len(with_values), self._min)
)
if len(with_values) > len(self._locators):
raise ValueError("Too many values provided!")
return zip(self._locators, with_values)
def __getitem__(self, i):
"""To delegate access to the separate locators"""
return self._locators[i]
@fill.method((ShowingInputs, Sequence))
def _fill_showing_inputs_seq(si, i):
for loc, val in si.zip(i):
fill(loc, val)
@fill.method((ShowingInputs, basestring))
def _fill_showing_inputs_str(si, s):
fill(si, [s])
class MultiFill(object):
"""Class designed to fill the same value to multiple fields
Args:
*fields: The fields where the value will be mirrored
"""
def __init__(self, *fields):
self.fields = fields
@fill.method((MultiFill, object))
def _fill_multi_obj(mf, o):
for field in mf.fields:
fill(field, o)
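# Illustrative MultiFill usage (the Input names are assumed examples):
#
#     mirrored = MultiFill(Input('password'), Input('verify_password'))
#     fill(mirrored, 'secret')   # writes the same value into both inputs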
class DriftGrid(Pretty):
""" Class representing the table (grid) specific to host drift analysis comparison page
"""
def __init__(self, loc="//div[@id='compare-grid']"):
self.loc = loc
def get_cell(self, row_text, col_index):
""" Finds cell element of the grid specified by column index and row text
Args:
row_text: Title text of the cell's row
col_index: Column index of the cell, starting with 0 for 1st data-containing column
Note:
`col_index` of 0 is used for the 2nd actual column in the drift grid, because
the 1st column does not contain headers, only row descriptions.
Returns:
Selenium element of the cell.
"""
self.expand_all_sections()
cell_loc = ".//th[contains(normalize-space(.), '{}')]/../td[{}]".format(row_text,
col_index + 1)
cell = sel.element(cell_loc, root=self.loc)
return cell
def cell_indicates_change(self, row_text, col_index):
""" Finds out if a cell, specified by column index and row text, indicates change
Args:
row_text: Title text of the cell's row
col_index: Column index of the cell
Note:
`col_index` of 0 is used for the 2nd actual column in the drift grid, because
the 1st column does not contain headers, only row descriptions.
Returns:
``True`` if there is a change present, ``False`` otherwise
"""
cell = self.get_cell(row_text, col_index)
# Cell either contains an image
try:
cell_img = sel.element(".//i | .//img", root=cell)
return sel.get_attribute(cell_img, "title") == 'Changed from previous'
# or text
except NoSuchElementException:
if 'color: rgb(33, 160, 236)' in sel.get_attribute(cell, 'style'):
return True
return False
def expand_all_sections(self):
""" Expands all sections to make the row elements found therein available
"""
while True:
# We need to do this one by one because the DOM changes on every expansion
try:
el = sel.element(
'.//div/span[contains(@class, "toggle") and contains(@class, "expand")]',
root=self.loc)
sel.click(el)
except NoSuchElementException:
break
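# Illustrative DriftGrid usage (the row title is an assumed example):
#
#     grid = DriftGrid()
#     if grid.cell_indicates_change('Number of CPUs', 0):
#         cell = grid.get_cell('Number of CPUs', 0)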
class ButtonGroup(object):
def __init__(self, key, fieldset=None):
""" A ButtonGroup is a set of buttons next to each other, as is used on the DefaultViews
page.
Args:
key: The name of the key field text before the button group.
"""
self.key = key
self.fieldset = fieldset
@property
def _icon_tag(self):
if version.current_version() >= 5.6:
return 'i'
else:
return 'img'
@property
def _state_attr(self):
if version.current_version() >= 5.6:
return 'title'
else:
return 'alt'
@property
def locator(self):
attr = re.sub(r"&", "&", quoteattr(self.key)) # We don't need it in xpath
path = './/label[contains(@class, "control-label") and ' \
'normalize-space(.)={}]/..'.format(attr)
if self.fieldset:
fieldset = quoteattr(self.fieldset)
path = '//fieldset[./h3[normalize-space(.)={}]]/'.format(fieldset) + path
return path
def locate(self):
""" Moves to the element """
# Use the header locator as the overall table locator
return sel.move_to_element(self.locator)
@property
def locator_base(self):
if version.current_version() < "5.5":
return self.locator + "/td[2]"
else:
return self.locator + "/div"
@property
def active(self):
""" Returns the alt tag text of the active button in thr group. """
loc = sel.element(self.locator_base + '/ul/li[@class="active"]/{}'.format(self._icon_tag))
return loc.get_attribute(self._state_attr)
def status(self, alt):
""" Returns the status of the button identified by the Alt Text of the image. """
active_loc = self.locator_base + '/ul/li/{}[@{}="{}"]'.format(
self._icon_tag, self._state_attr, alt)
try:
sel.element(active_loc)
return True
except NoSuchElementException:
pass
inactive_loc = self.locator_base + '/ul/li/a/{}[@alt="{}"]'.format(self._icon_tag, alt)
try:
sel.element(inactive_loc)
return False
except NoSuchElementException:
pass
def choose(self, alt):
""" Sets the ButtonGroup to select the button identified by the alt text. """
if not self.status(alt):
inactive_loc = self.locator_base + '/ul/li/a/{}[@alt="{}"]'.format(self._icon_tag, alt)
sel.click(inactive_loc)
@fill.method((ButtonGroup, basestring))
def _fill_showing_button_group(tb, s):
tb.choose(s)
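# Illustrative ButtonGroup usage (the key and alt text are assumed examples):
#
#     bg = ButtonGroup('Infrastructure Providers')
#     fill(bg, 'Grid View')     # same as bg.choose('Grid View')
#     active_alt = bg.active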
class ColorGroup(object):
def __init__(self, key):
""" A ColourGroup is a set of colour buttons next to each other, as is used on the DefaultViews
page.
Args:
key: The name of the key field text before the button group.
"""
self.key = key
self.locator = '//td[@class="key" and text()="{}"]/..'.format(self.key)
def locate(self):
""" Moves to the element """
# Use the header locator as the overall table locator
return sel.move_to_element(self.locator)
@property
def active(self):
""" Returns the alt tag text of the active button in thr group. """
loc = sel.element(self.locator + '/td[2]/div[contains(@title, "selected")]')
color = re.search('The (.*?) theme', loc.get_attribute('title')).groups()[0]
return color
def status(self, color):
""" Returns the status of the color button identified by the Title Text of the image. """
active_loc = self.locator + '/td[2]/div[contains(@title, "{}")' \
'and contains(@title, "selected")]'.format(color)
try:
sel.element(active_loc)
return True
except NoSuchElementException:
pass
inactive_loc = self.locator + '/td[2]/div[contains(@title, "{}")' \
'and contains(@title, "Click")]'.format(color)
try:
sel.element(inactive_loc)
return False
except NoSuchElementException:
pass
def choose(self, color):
""" Sets the ColorGroup to select the button identified by the title text. """
if not self.status(color):
inactive_loc = self.locator + '/td[2]/div[contains(@title, "{}")' \
'and contains(@title, "Click")]'.format(color)
sel.click(inactive_loc)
@fill.method((ColorGroup, basestring))
def _fill_showing_color_group(tb, s):
tb.choose(s)
class DynamicTable(Pretty):
"""A table that can add or remove the rows.
"""
pretty_attrs = "root_loc", "default_row_item"
ROWS = ".//tbody/tr[not(contains(@id, 'new_tr'))]"
DELETE_ALL = {
version.LOWEST: ".//tbody/tr/td/img[@alt='Delete']",
'5.6': './/tbody/tr/td/button/i[contains(@class, "minus")]'
}
def __init__(self, root_loc, default_row_item=None):
self.root_loc = root_loc
self.default_row_item = default_row_item
@property
def rows(self):
return map(lambda r_el: self.Row(self, r_el), sel.elements(self.ROWS, root=self.root_loc))
@cached_property
def header_names(self):
return map(sel.text, sel.elements(".//thead/tr/th", root=self.root_loc))
def click_add(self):
sel.click(sel.element(
".//tbody/tr[@id='new_tr']/td//img | .//tbody/tr[@id='new_tr']/td//i |"
" ./tbody/tr[@id='new_tr']/td/button",
root=self.root_loc))
def click_save(self):
if version.current_version() < "5.6":
sel.click(sel.element(
".//tbody/tr[@id='new_tr']/td//input[@type='image']", root=self.root_loc))
else:
# 5.6+ uses the same button.
self.click_add()
def delete_row(self, by):
pass
def clear(self):
while True:
buttons = sel.elements(self.DELETE_ALL)
if not buttons:
break
sel.click(buttons[0])
def add_row(self, data):
self.click_add()
editing_row = self.Row(self, ".//tbody/tr[@id='new_tr']")
fill(editing_row, data)
self.click_save()
class Row(object):
def __init__(self, table, root):
self.table = table
self.root = root
@property
def values(self):
cells = sel.elements("./td", root=self.root)
return dict(zip(self.table.header_names, map(sel.text, cells)))
@property
def inputs(self):
result = []
for cell in sel.elements("./td", root=self.root):
inputs = sel.elements("./input", root=cell)
if not inputs:
result.append(None)
else:
result.append(inputs[0])
return result
@property
def inputs_for_filling(self):
return dict(zip(self.table.header_names, self.inputs))
@fill.method((DynamicTable.Row, Mapping))
def _fill_dt_row_map(dtr, m):
for name, input in dtr.inputs_for_filling.iteritems():
fill(input, m.get(name))
@fill.method((DynamicTable.Row, Anything))
def _fill_dt_row_other(dtr, anything):
mapping_fields = [name for name in dtr.table.header_names if name.strip()]
if isinstance(anything, (list, tuple)) and len(anything) == len(mapping_fields):
# Create the dict and fill by dict
fill(dtr, dict(zip(mapping_fields, anything)))
else:
# Use the default field
if dtr.table.default_row_item is None:
raise Exception("Cannot fill table row with anything when we dont know the def. field")
fill(dtr, {dtr.table.default_row_item: anything})
@fill.method((DynamicTable, list))
def _fill_dt_list(dt, l, clear_before=False):
if clear_before:
dt.clear()
for item in l:
dt.add_row(item)
@fill.method((DynamicTable, Anything))
def _fill_dt_anything(dt, anything, **kwargs):
fill(dt, [anything], **kwargs)
fill.prefer((DynamicTable, Anything), (object, Mapping))
fill.prefer((DynamicTable.Row, Anything), (object, Mapping))
fill.prefer((Select, types.NoneType), (object, types.NoneType))
fill.prefer((DHTMLSelect, types.NoneType), (object, types.NoneType))
fill.prefer((object, types.NoneType), (Select, object))
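# Illustrative DynamicTable usage (the root locator and column names are
# assumed examples):
#
#     dt = DynamicTable("//div[@id='form_div']//table", default_row_item='Name')
#     fill(dt, [{'Name': 'foo', 'Value': 'bar'}])  # adds one row per mapping
#     fill(dt, 'baz')    # single value goes to the default_row_item column
#     dt.clear()         # deletes all rows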
class AngularSelect(Pretty):
BUTTON = "//button[@data-id='{}']"
pretty_attrs = ['_loc', 'none', 'multi', 'exact']
def __init__(self, loc, none=None, multi=False, exact=False):
self.none = none
if isinstance(loc, AngularSelect):
self._loc = loc._loc
else:
self._loc = self.BUTTON.format(loc)
self.multi = multi
self.exact = exact
def locate(self):
return sel.move_to_element(self._loc)
@property
def select(self):
return Select('select#{}'.format(self.did), multi=self.multi)
@property
def did(self):
return sel.element(self._loc).get_attribute('data-id')
@property
def is_broken(self):
return sel.is_displayed(self) and sel.is_displayed(self.select)
@property
def is_open(self):
el = sel.element(self._loc)
return el.get_attribute('aria-expanded') == "true"
def open(self):
sel.click(self._loc)
def select_by_visible_text(self, text):
if not self.is_open:
self.open()
if self.exact:
new_loc = self._loc + '/../div/ul/li/a[normalize-space(.)={}]'.format(
unescape(quoteattr(text)))
else:
new_loc = self._loc + '/../div/ul/li/a[contains(normalize-space(.), {})]'.format(
unescape(quoteattr(text)))
e = sel.element(new_loc)
sel.execute_script("arguments[0].scrollIntoView();", e)
sel.click(new_loc)
def select_by_value(self, value):
value = str(value) # Because what we read from the page is a string
options_map = [a.value for a in self.select.all_options]
index = options_map.index(value)
if not self.is_open:
self.open()
new_loc = self._loc + '/../div/ul/li[@data-original-index={}]'.format(index)
e = sel.element(new_loc)
sel.execute_script("arguments[0].scrollIntoView();", e)
sel.click(new_loc)
@property
def all_options(self):
return self.select.all_options
@property
def classes(self):
"""Combines class from the button and from select."""
return sel.classes(self) | sel.classes("select#{}".format(self.did))
@property
def options(self):
return self.select.options
@property
def first_selected_option(self):
new_loc = self._loc + '/span'
e = sel.element(new_loc)
text = e.text
for option in self.all_options:
if option.text == text:
return option
return None
@property
def first_selected_option_text(self):
new_loc = self._loc + '/span'
e = sel.element(new_loc)
text = e.text
return text
@fill.method((AngularSelect, sel.ByText))
@fill.method((AngularSelect, basestring))
def _fill_angular_string(obj, s):
if s:
obj.select_by_visible_text(s)
else:
return
@fill.method((AngularSelect, sel.ByValue))
def _fill_angular_value(obj, s):
if s.value:
obj.select_by_value(s.value)
else:
return
@fill.method((AngularSelect, list))
def _fill_angular_list(obj, l):
for i in l:
fill(obj, i)
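# Illustrative AngularSelect usage (the data-id and option text/value are
# assumed examples):
#
#     asel = AngularSelect('miq_date_1')
#     fill(asel, 'This Week')     # select by visible text
#     asel.select_by_value('2')   # or by the underlying option value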
class AngularCalendarInput(Pretty):
pretty_attrs = "input_name", "click_away_element"
def __init__(self, input_name, click_away_element):
self.input_name = input_name
self.click_away_element = click_away_element
@property
def input(self):
return Input(self.input_name, use_id=True)
@property
def clear_button(self):
return sel.element("../a/img", root=self.input)
def locate(self):
return self.input.locate()
def fill(self, value):
if isinstance(value, date):
value = '{}/{}/{}'.format(value.month, value.day, value.year)
else:
value = str(value)
try:
sel.click(self.input)
sel.set_text(self.input, value)
finally:
# To ensure the calendar itself is closed
sel.click(self.click_away_element)
def clear(self):
if sel.text(self.input).strip():
sel.click(self.clear_button)
@fill.method((AngularCalendarInput, Anything))
def _fill_angular_calendar_input(obj, a):
return obj.fill(a)
class EmailSelectForm(Pretty):
"""Class encapsulating the e-mail selector, eg. in Control/Alarms editing."""
fields = Region(locators=dict(
from_address=Input('from'),
user_emails={
version.LOWEST: Select("//select[@id='user_email']"),
"5.5": AngularSelect("user_email")},
manual_input=Input('email'),
add_email_manually={
version.LOWEST: "(//img | //i)[@title='Add' and contains(@onclick, 'add_email')]",
"5.5": "//div[@alt='Add']/i"}
))
@property
def to_emails(self):
"""Returns list of e-mails that are selected"""
return [
sel.text(el)
for el
in sel.elements("//a[contains(@href, 'remove_email')]")
]
@property
def user_emails(self):
"""Returns list of e-mail that users inside CFME have so that they can be selected"""
try:
return [
sel.get_attribute(el, "value")
for el
in self.fields.user_emails.options
if len(sel.get_attribute(el, "value").strip()) > 0
]
except NoSuchElementException: # It disappears when empty
return []
def remove_email(self, email):
"""Remove specified e-mail
Args:
email: E-mail to remove
"""
if email in self.to_emails:
sel.click("//a[contains(@href, 'remove_email')][normalize-space(.)='{}']".format(email))
return email not in self.to_emails
else:
return True
@to_emails.setter
def to_emails(self, emails):
"""Function for filling e-mails
Args:
emails: List of e-mails that should be filled. Any existing e-mails that are not in this
variable will be deleted.
"""
if isinstance(emails, basestring):
emails = [emails]
# Delete e-mails that have nothing to do here
for email in self.to_emails:
if email not in emails:
assert self.remove_email(email), "Could not remove e-mail '{}'".format(email)
# Add new
for email in emails:
if email in self.to_emails:
continue
if email in self.user_emails:
sel.select(self.fields.user_emails, sel.ByValue(email))
else:
fill(self.fields.manual_input, email)
sel.click(self.fields.add_email_manually)
assert email in self.to_emails, "Adding e-mail '{}' manually failed!".format(email)
@fill.method((EmailSelectForm, basestring))
@fill.method((EmailSelectForm, list))
@fill.method((EmailSelectForm, set))
@fill.method((EmailSelectForm, tuple))
def fill_email_select_form(form, emails):
form.to_emails = emails
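# Illustrative EmailSelectForm usage (the addresses are assumed examples):
#
#     esf = EmailSelectForm()
#     fill(esf, ['admin@example.com', 'ops@example.com'])
#     esf.remove_email('ops@example.com')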
class BootstrapSwitch(object):
def __init__(self, input_id):
"""A Bootstrap On/Off switch
Args:
input_id: The HTML ID of the input element associated with the checkbox
"""
self.input_id = input_id
self.loc_container = "//input[@id={}]/..".format(quoteattr(self.input_id))
self.on_off = "{}/span[contains(@class, 'bootstrap-switch-handle-{}')]".format(
self.loc_container, '{}')
def fill(self, val):
"""Convenience function"""
if val:
self.check()
else:
self.uncheck()
def check(self):
"""Checks the bootstrap box"""
el = sel.element(self.on_off.format("off"))
sel.click(el)
def uncheck(self):
"""Unchecks the bootstrap box"""
el = sel.element(self.on_off.format("on"))
sel.click(el)
    def is_selected(self):
        return sel.is_displayed(
            "//div[contains(@class, 'bootstrap-switch-on')]{}"
            .format(self.loc_container))
@property
def angular_help_block(self):
"""Returns the first visible angular helper text (like 'Required')."""
loc = ("{}/../../../../..//div[contains(@class, 'form-group has-error')]"
.format(self.loc_container))
try:
return sel.text(loc).strip()
except NoSuchElementException:
return None
@fill.method((BootstrapSwitch, bool))
def fill_bootstrap_switch(bs, val):
bs.fill(val)
class OldCheckbox(object):
def __init__(self, input_id):
"""An original HTML checkbox element
Args:
input_id: The HTML ID of the input element associated with the checkbox
"""
self.input_id = input_id
self.locator = "//input[@id={}]".format(quoteattr(input_id))
def fill(self, val):
"""
Checks or unchecks
Args:
value: The value the checkbox should represent as a bool (or None to do nothing)
Returns: Previous state of the checkbox
"""
if val is not None:
selected = self.is_selected()
if selected is not val:
logger.debug("Setting checkbox {} to {}".format(str(self.locator), str(val)))
                sel.click(self._el())
return selected
def check(self):
"""Convenience function"""
self.fill(True)
def uncheck(self):
"""Convenience function"""
self.fill(False)
def _el(self):
return sel.move_to_element(self.locator)
def is_selected(self):
return self._el().is_selected()
@fill.method((OldCheckbox, bool))
def fill_oldcheckbox_switch(ob, val):
ob.fill(val)
class CFMECheckbox(Selector):
def __init__(self, input_id):
self.input_id = input_id
super(CFMECheckbox, self).__init__()
def decide(self):
ref_loc = "//input[@id={}]/../span" \
"[contains(@class, 'bootstrap-switch-label')]".format(quoteattr(self.input_id))
if sel.is_displayed(ref_loc):
return BootstrapSwitch(self.input_id)
else:
return OldCheckbox(self.input_id)
@fill.method((CFMECheckbox, bool))
def fill_cfmecheckbox_switch(ob, val):
ob.fill(val)
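# Illustrative CFMECheckbox usage (the input id is assumed). The Selector
# decides at fill time whether the page renders a BootstrapSwitch or an
# OldCheckbox and delegates to it:
#
#     cb = CFMECheckbox('monitor_alerts_cb')
#     fill(cb, True)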
def breadcrumbs():
"""Returns a list of breadcrumbs names if names==True else return as elements.
Returns:
:py:class:`list` of breadcrumbs if they are present, :py:class:`NoneType` otherwise.
"""
elems = sel.elements('//ol[contains(@class, "breadcrumb")]/li')
return elems if elems else None
def breadcrumbs_names():
elems = breadcrumbs()
if elems:
return map(sel.text_sane, elems)
SUMMARY_TITLE_LOCATORS = [
'//h1'
]
SUMMARY_TITLE_LOCATORS = '|'.join(SUMMARY_TITLE_LOCATORS)
def summary_title():
"""Returns a title of the page.
Returns:
:py:class:`str` if present, :py:class:`NoneType` otherwise.
"""
try:
return sel.text_sane(SUMMARY_TITLE_LOCATORS)
except sel.NoSuchElementException:
return None
def browser_title():
"""Returns a title of the page.
Returns:
:py:class:`str` if present, :py:class:`NoneType` otherwise.
"""
try:
return browser().title.split(': ', 1)[1]
except IndexError:
return None
def controller_name():
"""Returns a title of the page.
Returns:
:py:class:`str` if present, :py:class:`NoneType` otherwise.
"""
return sel.execute_script('return ManageIQ.controller;')
def match_location(controller=None, title=None, summary=None):
"""Does exact match of passed data
Returns:
:py:class:`bool`
"""
result = []
if controller:
result.append(controller_name() == controller)
if title:
result.append(browser_title() == title)
if summary:
result.append((summary_title() == summary) or
(sel.is_displayed('//h3[normalize-space(.) = {}]'.format(quote(summary)))))
return all(result)
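# Illustrative match_location usage (controller and title values are assumed):
#
#     if match_location(controller='ems_infra', title='Infrastructure Providers'):
#         pass  # we are on the expected page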
class StatusBox(object):
""" Status box as seen in containers overview page
Status box modelling.
Args:
name: The name of the status box as it appears in CFME, e.g. 'Nodes'
Returns: A StatusBox instance.
"""
def __init__(self, name):
self.name = name
def value(self):
if "_" in self.name:
self.name = self.name.split('_', 1)[-1]
elem_text = sel.text(
"//span[contains(@class,'card-pf-aggregate-status-count')]"
"/../../../../../div[contains(@status, 'objectStatus.{}')]".format(self.name.lower()))
match = re.search(r'\d+', elem_text)
return int(match.group())
| gpl-2.0 | 137,997,576,260,013,500 | 33.59466 | 103 | 0.573597 | false |
jyundt/oval | app/main/views.py | 1 | 16557 | from collections import OrderedDict
from itertools import groupby
from operator import itemgetter, and_
import datetime
from flask import render_template, redirect, request, url_for, current_app, flash
from slackclient import SlackClient
from sqlalchemy import extract, or_
from sqlalchemy import func
from app import db
from app.util import sort_and_rank
from . import main
from .forms import FeedbackForm
from ..email import send_feedback_email
from ..models import Course, RaceClass, Racer, Team, Race, Participant
def _gen_default(year, race_class_id, race_calendar):
"""Default error case for standings type parameter
It seems useful to create a full function here in case any logging,
or more important work should be done on error.
"""
return None
def _gen_race_calendar(year, race_class_id):
"""Returns the full calendar of dates for a class and year of racing
This is necessary because dates where individuals do not participate will
not exist in their individual results otherwise.
"""
dates = Race.query.with_entities(Race.date, Race.id)\
.filter(extract("year", Race.date) == year)\
.filter(Race.points_race == True)\
.filter(Race.class_id == race_class_id).all()
dates = sorted(dates, key=lambda x: x[0])
return dates
def _make_result(name, id_, rank, total_pts, pts, race_calendar, team_name, team_id):
"""Create result dictionary to make html templates more readable
"""
result = {"name": name,
"id": id_,
"rank": rank,
"total_pts": total_pts,
"race_pts": OrderedDict([(date, "-") for date,_ in race_calendar]),
"team_name": team_name,
"team_id": team_id}
for point, date in pts:
if point:
result["race_pts"][date] = point
return result
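# Illustrative shape of the dict produced by _make_result (all values are
# assumed examples):
#
#     {'name': 'Jane Racer', 'id': 7, 'rank': 1, 'total_pts': 42,
#      'race_pts': OrderedDict([(datetime.date(2016, 4, 5), 10), ...]),
#      'team_name': 'Some Team', 'team_id': 3}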
def _gen_team_standings(race_info, race_calendar):
"""Return team standings with individual race and total points
"""
# Sort race info first by team (for grouping below) then by date
# for table construction. Filter results not associated with a team.
team_race_info = sorted(
[ri for ri in race_info if ri.team_id],
key=lambda ri: (ri.team_id, ri.race_date))
def sum_team_points_by_date(team_results):
return [
(sum(ri.team_points or 0 for ri in dg), date)
for (team_id, date), dg in
groupby(team_results, key=lambda ri: (ri.team_id, ri.race_date))]
team_points_by_date = {
team_id: sum_team_points_by_date(g) for team_id, g
in groupby(team_race_info, key=lambda ri: ri.team_id)}
# Aggregate results by team
team_agg_info = [
(team_id, team_name, sum(ri.team_points or 0 for ri in g))
for ((team_id, team_name), g) in
groupby(team_race_info, key=lambda ri: (ri.team_id, ri.team_name))
]
# Filter to only teams that have points, and
# rank by total team points.
ranked_teams = sort_and_rank(
filter(itemgetter(2), team_agg_info),
key=itemgetter(2))
results = []
for rank, (team_id, team_name, total_pts) in ranked_teams:
result = _make_result(name=team_name, id_=team_id, rank=rank, total_pts=total_pts,
pts=team_points_by_date[team_id], race_calendar=race_calendar,
team_name=None, team_id=None)
results.append(result)
return results
def _gen_ind_standings(race_info, race_calendar):
"""Return top individual racer standings with individual race and total points
Note, individual placing tiebreak is by number of wins, followed by number of
seconds places, etc.
"""
# Sort race info first by racer (for grouping below) then by date
# for table construction.
racer_race_info = sorted(race_info, key=lambda ri: (ri.racer_id, ri.race_date))
# A list of per-race points for each racer
racer_race_points = {
racer_id: list((ri.points if not ri.points_dropped else '(%d)' % ri.points or 0, ri.race_date) for ri in g)
for racer_id, g in groupby(racer_race_info, key=lambda ri: ri.racer_id)}
# Team info for each racer
racer_teams = {
racer_id: [(ri.team_name, ri.team_id) for ri in g]
for racer_id, g in groupby(racer_race_info, key=lambda ri: ri.racer_id)
}
def placing_counts(placings):
# Helper to count placings
# Returns a tuple with the count of number of first places, then number
# of seconds, etc., up to the 8th place.
placings = filter(None, placings)
if not placings:
return ()
counts_by_place = {place: sum(1 for _ in g) for place, g in groupby(sorted(placings))}
assert min(counts_by_place.keys()) >= 1
return tuple(counts_by_place.get(place) or 0 for place in xrange(1, 9))
# Group race results by racer
race_info_gby_racer = [
((racer_id, racer_name), list(g))
for ((racer_id, racer_name), g) in
groupby(racer_race_info, key=lambda ri: (ri.racer_id, ri.racer_name))]
# Aggregate points and placings by racer
racer_agg_info = [(
racer_id,
racer_name,
sum(r.points if r.points and not r.points_dropped else 0 for r in g),
placing_counts(r.place for r in g))
for (racer_id, racer_name), g in race_info_gby_racer]
# Filter to only racers that have any points,
# rank by total points then by placings.
ranked_racers = sort_and_rank(
filter(itemgetter(2), racer_agg_info),
key=itemgetter(2, 3))
results = []
for rank, (racer_id, racer_name, racer_points, _) in ranked_racers:
team = racer_teams[racer_id][-1] if racer_id in racer_teams else (None, None)
result = _make_result(name=racer_name, id_=racer_id, rank=rank, total_pts=racer_points,
pts=racer_race_points[racer_id], race_calendar=race_calendar,
team_name=team[0], team_id=team[1])
results.append(result)
return results
def _gen_mar_standings(race_info, race_calendar):
"""Return top MAR standings with individual race and total points
"""
# Sort race info first by racer (for grouping below) then by date
# for table construction.
racer_race_info = sorted(race_info, key=lambda ri: (ri.racer_id, ri.race_date))
# A list of per-race mar points for each racer
racer_race_mar_points = {
racer_id: list((ri.mar_points, ri.race_date) for ri in g)
for racer_id, g in groupby(racer_race_info, key=lambda ri: ri.racer_id)}
# Team info for each racer
racer_teams = {
racer_id: list((ri.team_name, ri.team_id) for ri in g)
for racer_id, g in groupby(racer_race_info, key=itemgetter(0))
}
# Aggregate mar points by racer
racer_agg_info = [
(racer_id, racer_name, sum(ri.mar_points or 0 for ri in g))
for (racer_id, racer_name), g in
groupby(racer_race_info, key=lambda ri: (ri.racer_id, ri.racer_name))]
# Filter to only racers that have any mar points,
# rank by total points.
ranked_racers = sort_and_rank(
filter(itemgetter(2), racer_agg_info),
key=itemgetter(2))
results = []
for rank, (racer_id, racer_name, racer_points) in ranked_racers:
team = racer_teams[racer_id][-1] if racer_id in racer_teams else (None, None)
result = _make_result(name=racer_name, id_=racer_id, rank=rank, total_pts=racer_points,
pts=racer_race_mar_points[racer_id], race_calendar=race_calendar,
team_name=team[0], team_id=team[1])
results.append(result)
return results
@main.route('/')
def index():
"""Fills and renders the front page index.html template
Only display recent results when they're within the past ~three months.
"""
recent_time = datetime.datetime.now() - datetime.timedelta(days=90)
recent_results = (
Race.query
.join(Participant, Race.id == Participant.race_id)
.filter(Race.date > recent_time)
.group_by(Race.id)
.having(func.count(Participant.id) > 0))
r1 = recent_results.subquery('r1')
r2 = recent_results.subquery('r2')
latest_races = (
db.session.query(r1)
.with_entities(
r1.c.id.label('id'),
r1.c.date.label('date'),
RaceClass.name.label('class_name'))
.join(r2, and_(r1.c.class_id == r2.c.class_id, r1.c.date < r2.c.date), isouter=True)
.join(RaceClass, RaceClass.id == r1.c.class_id)
.filter(r2.c.id.is_(None))
.order_by(r1.c.date.desc(), RaceClass.id))
races = latest_races.all()
return render_template('index.html', races=races)
@main.route('/standings/')
def standings():
years = sorted(set(
int(date.year) for (date,) in Race.query.with_entities(Race.date)
.filter_by(points_race=True)),
reverse=True)
try:
req_year = int(request.args.get('year'))
except (ValueError, TypeError):
req_year = None
year = req_year if req_year is not None else (years[0] if years else None)
race_classes = [
(race_class_id.id, race_class_id.name)
for race_class_id in
RaceClass.query.with_entities(
RaceClass.id, RaceClass.name)
.join(Race)
.join(Participant)
.filter(extract("year", Race.date) == year)
.filter(Race.points_race == True)
.group_by(RaceClass.id)
.order_by(RaceClass.name)]
year_race_class_ids = [race_class_id for race_class_id, _ in race_classes]
try:
req_race_class_id = int(request.args.get('race_class_id'))
except (ValueError, TypeError):
req_race_class_id = None
race_class_id = (
req_race_class_id if req_race_class_id in year_race_class_ids
else (year_race_class_ids[0] if year_race_class_ids else None))
if year is not None and race_class_id is not None:
race_info = (
Racer.query.with_entities(
Racer.id.label('racer_id'), Racer.name.label('racer_name'),
Race.date.label('race_date'), Participant.points,
Participant.team_points, Participant.mar_points,
Team.id.label('team_id'), Team.name.label('team_name'), Participant.place,
Participant.points_dropped)
.join(Participant)
.join(Team, isouter=True)
.join(Race)
.filter(Race.points_race == True)
.filter(extract("year", Race.date) == year)
.filter(Race.class_id == race_class_id)
.order_by(Racer.id, Race.date.desc())
.all())
race_calendar = _gen_race_calendar(year, race_class_id)
ind_standings = _gen_ind_standings(race_info, race_calendar)
team_standings = _gen_team_standings(race_info, race_calendar)
mar_standings = _gen_mar_standings(race_info, race_calendar)
results = (
('Individual', ind_standings),
('Team', team_standings),
('MAR', mar_standings))
return render_template(
'standings.html',
selected_year=year, selected_race_class_id=race_class_id,
years=years, race_classes=race_classes,
results=results, race_calendar=race_calendar)
return render_template('standings.html', selected_year=year, years=years)
@main.route('/results/')
def results():
years = sorted(set(
int(date.year) for (date,) in Race.query.with_entities(Race.date).all()),
reverse=True)
try:
req_year = int(request.args.get('year'))
except (ValueError, TypeError):
req_year = None
year = req_year if req_year is not None else (years[0] if years else None)
race_classes = [
(race_class_id.id, race_class_id.name)
for race_class_id in
RaceClass.query.with_entities(
RaceClass.id, RaceClass.name)
.join(Race)
.join(Participant)
.filter(extract("year", Race.date) == year)
.group_by(RaceClass.id)
.order_by(RaceClass.name)]
year_race_class_ids = [race_class_id for race_class_id, _ in race_classes]
try:
req_race_class_id = int(request.args.get('race_class_id'))
except (ValueError, TypeError):
req_race_class_id = None
race_class_id = (
req_race_class_id if req_race_class_id in year_race_class_ids
else (year_race_class_ids[0] if year_race_class_ids else None))
if year is not None and race_class_id is not None:
race_info = (Racer.query.with_entities(
Racer.id, Racer.name,
Team.id, Team.name,
Participant.place, Participant.mar_place,
Race.id, Race.date,
Race.course_id, Race.average_lap, Race.fast_lap,
Race.winning_time, Race.laps, Race.starters, Race.points_race,
RaceClass.id, RaceClass.name,
Course.name, Course.length_miles)
.join(Participant, Participant.racer_id == Racer.id)
.join(Team, Team.id == Participant.team_id, isouter=True)
.join(Race, Race.id == Participant.race_id)
.join(RaceClass, RaceClass.id == Race.class_id)
.join(Course, Course.id == Race.course_id)
.filter(or_(Participant.place == 1, Participant.mar_place == 1))
.filter(extract("year", Race.date) == year)
.filter(Race.class_id == race_class_id)
.order_by(Race.date)
.all())
race_info_by_date = [
(date, list(date_group))
for date, date_group in groupby(race_info, key=itemgetter(7))]
results = []
for date, date_group in race_info_by_date:
(race_id, race_date, course_id, average_lap, fast_lap, winning_time,
laps, starters, points_race, race_class_id, race_class_name,
course_name, course_length_miles) = date_group[0][6:]
winner = None
mar_winner = None
for maybe_winner in date_group:
racer_id, racer_name, team_id, team_name, place, mar_place = maybe_winner[0:6]
if place == 1:
winner = (racer_id, racer_name, team_id, team_name)
if mar_place == 1:
mar_winner = (racer_id, racer_name, team_id, team_name)
avg_lap = (average_lap.total_seconds()) if average_lap else (
(winning_time.total_seconds() / laps)
if (winning_time and laps) else None)
avg_speed = (
course_length_miles / (avg_lap / 3600)
if course_length_miles and avg_lap
else None)
results.insert(0, {
'race_id': race_id,
'date': date,
'course_name': course_name,
'winner': winner,
'mar_winner': mar_winner,
'fast_lap': fast_lap,
'avg_speed': avg_speed,
'starters': starters,
'points_race': points_race})
return render_template(
'results.html',
selected_year=year, selected_race_class_id=race_class_id,
years=years, race_classes=race_classes, results=results)
return render_template('results.html', selected_year=year, years=years)
@main.route('/feedback/', methods=['GET', 'POST'])
def send_feedback():
form = FeedbackForm()
if form.validate_on_submit():
name = form.name.data
replyaddress = form.replyaddress.data
subject = form.subject.data
feedback = form.feedback.data
send_feedback_email(name, replyaddress, subject, feedback)
message = "%s <%s> - %s: %s" % (name, replyaddress, subject, feedback)
token = current_app.config['SLACK_OAUTH_API_TOKEN']
sc = SlackClient(token)
sc.api_call("chat.postMessage", channel="#feedback", text=message,
username="Flask")
flash('Feedback sent!')
return redirect(url_for('main.index'))
return render_template('feedback.html', form=form)
@main.route('/robots.txt')
def serve_static():
return current_app.send_static_file('robots.txt')
@main.route('/favicon.ico')
def serve_favicon():
return current_app.send_static_file('favicon.ico')
| gpl-2.0 | 5,109,266,585,834,278,000 | 38.705036 | 115 | 0.59872 | false |
hasgeek/funnel | migrations/versions/daeb6753652a_add_profile_protected_and_verified_flags.py | 1 | 1186 | """Add profile protected and verified flags.
Revision ID: daeb6753652a
Revises: 8b46a8a8ca17
Create Date: 2020-11-06 02:57:05.891627
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'daeb6753652a'
down_revision = '8b46a8a8ca17'
branch_labels = None
depends_on = None
def upgrade():
op.add_column(
'profile',
sa.Column(
'is_protected',
sa.Boolean(),
nullable=False,
server_default=sa.sql.expression.false(),
),
)
op.alter_column('profile', 'is_protected', server_default=None)
op.add_column(
'profile',
sa.Column(
'is_verified',
sa.Boolean(),
nullable=False,
server_default=sa.sql.expression.false(),
),
)
op.alter_column('profile', 'is_verified', server_default=None)
op.create_index(
op.f('ix_profile_is_verified'), 'profile', ['is_verified'], unique=False
)
def downgrade():
op.drop_index(op.f('ix_profile_is_verified'), table_name='profile')
op.drop_column('profile', 'is_verified')
op.drop_column('profile', 'is_protected')
| agpl-3.0 | -5,126,532,979,559,593,000 | 23.708333 | 80 | 0.607926 | false |
ionata/django-emailmeld | emailmeld/south_migrations/0001_initial.py | 1 | 1566 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'EmailMeldModel'
db.create_table('emailmeld_emailmeldmodel', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('email_type', self.gf('django.db.models.fields.CharField')(default='MARKDOWN', max_length=10)),
('template', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
('subject', self.gf('django.db.models.fields.CharField')(max_length=255)),
('body', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal('emailmeld', ['EmailMeldModel'])
def backwards(self, orm):
# Deleting model 'EmailMeldModel'
db.delete_table('emailmeld_emailmeldmodel')
models = {
'emailmeld.emailmeldmodel': {
'Meta': {'object_name': 'EmailMeldModel'},
'body': ('django.db.models.fields.TextField', [], {}),
'email_type': ('django.db.models.fields.CharField', [], {'default': "'MARKDOWN'", 'max_length': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'template': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
}
}
complete_apps = ['emailmeld'] | bsd-2-clause | 2,746,133,304,663,976,000 | 40.236842 | 115 | 0.597701 | false |
Huyuwei/tvm | topi/python/topi/opengl/softmax.py | 1 | 2054 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, unused-variable, trailing-whitespace
"""Schedule for softmax operator"""
import tvm
from .. import generic
@generic.schedule_softmax.register(["opengl"])
def schedule_softmax(outs):
"""Schedule for softmax op.
Parameters
----------
outs: Array of Tensor
The computation graph description of reduce in the format
of an array of tensors.
Returns
-------
sch: Schedule
The computation schedule for the op.
"""
outs = [outs] if isinstance(outs, tvm.tensor.Tensor) else outs
s = tvm.create_schedule([x.op for x in outs])
softmax = outs[0]
op_tag = softmax.op.tag
if op_tag == 'softmax_output':
expsum = softmax.op.input_tensors[1]
exp = softmax.op.input_tensors[0]
max_elem = s[exp].op.input_tensors[1]
elif op_tag == 'log_softmax_output':
exp = None
max_elem = softmax.op.input_tensors[1]
expsum = softmax.op.input_tensors[2]
else:
raise ValueError('Tag is expected to be softmax_output or log_softmax_output. \
Got {0}'.format(op_tag))
    if exp is not None:
s[exp].opengl()
s[max_elem].opengl()
s[expsum].opengl()
s[softmax].opengl()
return s
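# Illustrative usage sketch (names and shapes are assumed; topi.nn.softmax is
# the usual producer of a tensor tagged 'softmax_output'):
#
#     data = tvm.placeholder((1, 16), name='data')
#     out = topi.nn.softmax(data)
#     s = schedule_softmax(out)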
| apache-2.0 | -7,088,279,253,726,987,000 | 33.233333 | 87 | 0.671373 | false |
pyta-uoft/pyta | nodes/if_exp.py | 1 | 1115 | """
IfExp astroid node
An if statement written in an expression form.
(IfExp node represents an expression, not a statement.)
Attributes:
- test (NodeNG)
- Holds a single node such as Compare to evaluate the truth condition of.
- body (NodeNG)
- A Node representing the suite to be executed when the if expression
          evaluates to True.
- orelse (NodeNG)
- The Node representing the suite to be executed when the if expression
evaluates to False.
Example 1:
IfExp(
test=Const(value=True),
body=Const(value=1),
orelse=Const(value=0))
Example 2:
IfExp(
test=Compare(
left=Name(name='eval_expr'),
ops=[['==', Name(name='expected')]]),
body=BinOp(
op='+',
left=Name(name='x'),
right=Name(name='y')),
orelse=Name(name='something'))
Type-checking:
The type of the expression is the same as the type of the body and orelse expressions
(they must have the same type).
"""
# Example 1
1 if True else 0
# Example 2
x + y if eval_expr == expected else something
| gpl-3.0 | 28,458,848,889,594,796 | 24.340909 | 89 | 0.622422 | false |
beiko-lab/gengis | bin/Lib/site-packages/numpy/distutils/tests/test_fcompiler_intel.py | 1 | 1161 | from numpy.testing import *
import numpy.distutils.fcompiler
intel_32bit_version_strings = [
("Intel(R) Fortran Intel(R) 32-bit Compiler Professional for applications"\
"running on Intel(R) 32, Version 11.1", '11.1'),
]
intel_64bit_version_strings = [
("Intel(R) Fortran IA-64 Compiler Professional for applications"\
"running on IA-64, Version 11.0", '11.0'),
("Intel(R) Fortran Intel(R) 64 Compiler Professional for applications"\
"running on Intel(R) 64, Version 11.1", '11.1')
]
class TestIntelFCompilerVersions(TestCase):
def test_32bit_version(self):
fc = numpy.distutils.fcompiler.new_fcompiler(compiler='intel')
for vs, version in intel_32bit_version_strings:
v = fc.version_match(vs)
assert_(v == version)
class TestIntelEM64TFCompilerVersions(TestCase):
def test_64bit_version(self):
fc = numpy.distutils.fcompiler.new_fcompiler(compiler='intelem')
for vs, version in intel_64bit_version_strings:
v = fc.version_match(vs)
assert_(v == version)
if __name__ == '__main__':
run_module_suite()
| gpl-3.0 | 4,528,753,986,483,240,000 | 32.147059 | 79 | 0.641688 | false |
simone-campagna/callerframe | docs/source/conf.py | 1 | 11602 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# callerframe documentation build configuration file, created by
# sphinx-quickstart on Thu Aug 20 13:21:39 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import callerframe
# The full version, including alpha/beta/rc tags.
release = callerframe.__version__
VERSION_INFO = [int(i) for i in release.split(".")]
# The short X.Y version.
version = ".".join(str(i) for i in VERSION_INFO[:2])
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.autosummary',
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.intersphinx',
]
napoleon_use_param = True
# Intersphinx python
intersphinx_mapping = {'python': ('https://docs.python.org/3.5', None)}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'callerframe'
copyright = '2015, Simone Campagna'
author = 'Simone Campagna'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
#html_logo = "img/logo.png"
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
try:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError:
pass
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'callerframedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'callerframe.tex', 'callerframe Documentation',
'Simone Campagna', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'callerframe', 'callerframe Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'callerframe', 'callerframe Documentation',
author, 'callerframe', 'Python decorator adding caller frame info to function globals.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
#def skip(app, what, name, obj, skip, options):
# if name == "__init__":
# return False
# return skip
#
#def setup(app):
# app.connect("autodoc-skip-member", skip)
| apache-2.0 | -8,275,241,672,940,575,000 | 29.612137 | 91 | 0.701 | false |
UbiCastTeam/touchwizard | touchwizard/canvas.py | 1 | 16655 | # -*- coding: utf-8 -*
import clutter
import gobject
import easyevent
import logging
import os
import time
from touchwizard.loading import LoadingWidget
logger = logging.getLogger('touchwizard')
class Canvas(clutter.Actor, clutter.Container, easyevent.User):
"""Wizard main actor which manages the user interface and pages.
    Listen for events:
    - next_page (page_name)
        Request for a new page identified by its name passed as content.
        The current page is pushed onto the top of the page history.
- previous_page
Request for displaying back the top of the page history. No content
expected. If the history is empty, quit the wizard.
- request_quit
        Request for quitting the wizard. Call the prepare_quit callback
        if it exists and then launch the wizard_quit event, which should
        be handled by the user main script.
Launch the event:
- wizard_quit
Sent after prepare_quit callback to notify the main script that it
can end the process.
"""
__gtype_name__ = 'Canvas'
# infobar_height = 104
# iconbar_height = 200
def __init__(self, first_page):
import touchwizard
clutter.Actor.__init__(self)
easyevent.User.__init__(self)
self.session = touchwizard.Session()
self.background = None
self.last_page_name = None
self.last_page_timestamp = None
self.previous_page_locked = False
self.previous_page_timeout_id = None
if touchwizard.canvas_bg:
if not os.path.exists(touchwizard.canvas_bg):
logger.error('Canvas background %s not found.', touchwizard.canvas_bg)
self.background = clutter.Texture(touchwizard.canvas_bg)
self.background.set_parent(self)
self.infobar = touchwizard.InfoBar()
self.infobar.set_parent(self)
self.iconbar = touchwizard.IconBar()
self.iconbar.set_parent(self)
self.loading = LoadingWidget()
self.loading.set_parent(self)
self.loading.hide()
self.loading_padding = 10
self.home_icon = touchwizard.Icon('shutdown')
self.home_icon.build()
self.previous_icon = touchwizard.IconRef(touchwizard.Icon('previous'))
# self.previous_icon.build()
easyevent.forward_event('icon_previous_actioned', 'previous_page')
self.history = list()
self.first_page = first_page
self.available_pages = dict()
self.current_page = None
self.register_event('next_page', 'previous_page', 'refresh_page', 'clear_history')
self.register_event('request_quit')
gobject.idle_add(self.lookup_pages)
gobject.idle_add(self.display_page, first_page)
def lookup_pages(self):
import touchwizard
origin = ''
path = touchwizard.page_path
if path is None:
if self.first_page is None:
return tuple()
self.available_pages[self.first_page.name] = self.first_page
import sys
origin = sys.modules[self.first_page.__module__].__file__
path = os.path.dirname(os.path.abspath(os.path.expanduser(origin)))
import imp
for f in os.listdir(path):
if f.endswith('.py') and f != os.path.basename(origin):
try:
module = imp.load_source(f[:-3], os.path.join(path, f))
except:
import traceback
logger.error('Cannot import page %s:\n%s', f[:-3], traceback.format_exc())
if not touchwizard.tolerant_to_page_import_error:
import sys
sys.exit(1)
continue
for attr_name in dir(module):
if attr_name.startswith('__'):
continue
attribute = getattr(module, attr_name)
if isinstance(attribute, type) \
and issubclass(attribute, touchwizard.Page) \
and attribute is not touchwizard.Page:
self.available_pages[attribute.name] = attribute
logger.info('%d pages found.', len(self.available_pages))
# print self.available_pages
def display_page(self, page, icons=None):
if isinstance(page, type):
self.current_page = page(self.session)
if self.current_page.reuse:
logger.info('Storing reusable page %s in cache.', self.current_page.name)
self.available_pages[self.current_page.name] = self.current_page
else:
self.current_page = page
            logger.info('Reusing already instantiated page %s from cache.', self.current_page.name)
os.environ["TOUCHWIZARD_CURRENT_PAGE"] = self.current_page.name
os.environ.pop("TOUCHWIZARD_REQUESTED_PAGE", None)
if page.need_loading:
self.loading.hide()
self._build_iconbar(icons)
self.current_page.panel.set_parent(self)
self.current_page.panel.lower_bottom()
if hasattr(self.current_page.panel, 'prepare') and callable(self.current_page.panel.prepare):
self.current_page.panel.prepare()
self.current_page.panel.show()
self.previous_page_locked = False
self.last_page_name = page.name
def _build_iconbar(self, icons):
import touchwizard
self.iconbar.clear()
if icons is not None:
# cached icons
previous_icon = icons[0]
next_icon = icons[-1]
icons = icons[1:-1]
else:
# uninstanciated icons
icons = self.current_page.icons
previous_icon = self.current_page.previous
next_icon = self.current_page.next
# Icon "previous"
self.home_icon.unregister_all_events()
if previous_icon is None:
if self.history:
last_page, last_icons = self.history[-1]
previous_icon = last_page.my_icon
if previous_icon is None:
previous_icon = self.previous_icon
else:
self.home_icon.register_events()
previous_icon = self.home_icon
condition = True
if isinstance(previous_icon, touchwizard.IconRef):
if callable(previous_icon.condition):
condition = previous_icon.condition()
else:
condition = previous_icon.condition
previous_icon = previous_icon.get_icon()
if condition:
previous_icon.build()
self.iconbar.set_previous(previous_icon)
# Icon "next"
condition = True
if next_icon is not None:
if isinstance(next_icon, touchwizard.IconRef):
if callable(next_icon.condition):
condition = next_icon.condition()
else:
condition = next_icon.condition
next_icon = next_icon.get_icon()
if condition:
next_icon.build()
self.iconbar.set_next(next_icon)
# Other icons
for icon in icons:
if isinstance(icon, touchwizard.IconRef):
if callable(icon.condition):
condition = icon.condition()
else:
condition = icon.condition
if not condition:
continue
icon = icon.get_icon()
icon.build()
self.iconbar.append(icon)
def evt_next_page(self, event):
if self.last_page_name is None or self.last_page_name != event.content:
gobject.timeout_add(100, self.do_next_page, event, priority=gobject.PRIORITY_HIGH)
self.unregister_event('next_page')
def do_next_page(self, event):
now = time.time()
name = event.content
if not self.last_page_timestamp or (now - self.last_page_timestamp) > 0.5:
logger.info('Page %r requested.', name)
os.environ["TOUCHWIZARD_REQUESTED_PAGE"] = name
self.current_page.panel.hide()
self.current_page.panel.unparent()
icon_states = self.iconbar.get_icon_states()
self.history.append((self.current_page, icon_states))
new_page = self.available_pages[name]
self.iconbar.clear(keep_back=True)
if new_page.need_loading:
self.loading.show()
gobject.idle_add(self.display_page, new_page)
else:
logger.warning('Page %s requested too quickly twice in a row (less than 500ms), not displaying', name)
self.register_event('next_page')
self.last_page_timestamp = now
def evt_previous_page(self, event):
if not self.previous_page_locked:
self.previous_page_locked = True
if self.previous_page_timeout_id is not None:
gobject.source_remove(self.previous_page_timeout_id)
self.previous_page_timeout_id = gobject.timeout_add(300, self.do_previous_page, event, priority=gobject.PRIORITY_HIGH)
def do_previous_page(self, event):
name = None
if event.content:
name = event.content
for page, icons in self.history[::-1]:
try:
previous, icons = self.history.pop()
except IndexError:
# logger.error('Previous page requested but history is empty.')
self.evt_request_quit(event)
return
logger.info('Back to %r page.', previous.name)
os.environ["TOUCHWIZARD_REQUESTED_PAGE"] = previous.name
self.current_page.panel.hide()
gobject.idle_add(self.current_page.panel.unparent)
if previous.need_loading:
self.loading.show()
if not self.current_page.reuse:
gobject.idle_add(self.current_page.panel.destroy)
if name is None or page.name == name:
break
self.current_page = page
gobject.idle_add(self.display_page, previous, icons)
def evt_refresh_page(self, event):
gobject.idle_add(self.do_refresh_page, event)
self.unregister_event('refresh_page')
def do_refresh_page(self, event):
name = self.current_page.name
logger.info('Page %r refresh requested.', name)
self.current_page.panel.hide()
self.current_page.panel.unparent()
gobject.idle_add(self.current_page.panel.destroy)
new_page = self.available_pages[name]
self.iconbar.clear(keep_back=True)
if new_page.need_loading:
self.loading.show()
gobject.idle_add(self.display_page, new_page)
self.register_event('refresh_page')
def evt_clear_history(self, event):
for page, icons in self.history:
gobject.idle_add(page.panel.destroy)
self.history = list()
def evt_request_quit(self, event):
self.evt_request_quit = self.evt_request_quit_fake
logger.info('Quit requested.')
try:
prepare_quit = getattr(self.current_page, "prepare_quit", None)
if prepare_quit:
if not callable(prepare_quit):
prepare_quit = getattr(self.current_page.panel, prepare_quit, None)
if callable(prepare_quit):
logger.info('prepare_quit callback found')
prepare_quit()
except Exception, e:
logger.warning("Failed to call prepare_quit method in page %s: %s", self.current_page, e)
self.launch_event('wizard_quit')
def evt_request_quit_fake(self, event):
logger.error('Quit request rejected.')
def evt_request_session(self, event):
self.launch_event('dispatch_session', self.session)
def evt_update_session(self, event):
self.session.update(event)
self.launch_event('dispatch_session', self.session)
def do_remove(self, actor):
        logger.debug('Panel "%s" removed.', actor.__class__.__name__)
def do_get_preferred_width(self, for_height):
import touchwizard
width = float(touchwizard.canvas_width)
return width, width
def do_get_preferred_height(self, for_width):
import touchwizard
height = float(touchwizard.canvas_height)
return height, height
def do_allocate(self, box, flags):
canvas_width = box.x2 - box.x1
canvas_height = box.y2 - box.y1
infobar_height = round(self.infobar.get_preferred_height(canvas_width)[1])
infobar_box = clutter.ActorBox()
infobar_box.x1 = 0
infobar_box.y1 = 0
infobar_box.x2 = canvas_width
infobar_box.y2 = infobar_height
self.infobar.allocate(infobar_box, flags)
iconbar_height = round(self.iconbar.get_preferred_height(canvas_width)[1])
iconbar_box = clutter.ActorBox()
iconbar_box.x1 = 0
iconbar_box.y1 = canvas_height - iconbar_height
iconbar_box.x2 = canvas_width
iconbar_box.y2 = canvas_height
self.iconbar.allocate(iconbar_box, flags)
loading_box = clutter.ActorBox()
loading_box.x1 = self.loading_padding
loading_box.y1 = infobar_height + self.loading_padding
loading_box.x2 = canvas_width - self.loading_padding
loading_box.y2 = canvas_height - iconbar_height - self.loading_padding
self.loading.allocate(loading_box, flags)
panel_box = clutter.ActorBox()
panel_box.x1 = 0
panel_box.y1 = infobar_height
panel_box.x2 = canvas_width
panel_box.y2 = canvas_height - iconbar_height
if self.background is not None:
self.background.allocate(panel_box, flags)
if self.current_page is not None:
self.current_page.panel.allocate(panel_box, flags)
clutter.Actor.do_allocate(self, box, flags)
def do_foreach(self, func, data=None):
children = [self.infobar, self.iconbar, self.loading]
if self.background:
children.append(self.background)
if self.current_page:
children.append(self.current_page.panel)
for child in children:
func(child, data)
def do_paint(self):
if self.background:
self.background.paint()
self.iconbar.paint()
if self.current_page:
self.current_page.panel.paint()
self.infobar.paint()
self.loading.paint()
def do_pick(self, color):
self.do_paint()
def quick_launch(page, width=None, height=None, overlay=None, main_loop_run_cb=None, main_loop_stop_cb=None):
if not logging._handlers:
# Install a default log handler if none set
import sys
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s %(name)-12s %(levelname)s %(message)s',
stream=sys.stderr)
logger.info('Initializing touchwizard app.')
import touchwizard
stage = clutter.Stage()
    if width is None and height is None:
width = touchwizard.canvas_width
height = touchwizard.canvas_height
else:
touchwizard.canvas_width = width
touchwizard.canvas_height = height
stage.set_size(width, height)
if page is not None:
stage.set_title(page.title)
canvas = Canvas(page)
stage.add(canvas)
if overlay is not None:
logger.info('Adding overlay %s', overlay)
stage.add(overlay)
overlay.show()
stage.show()
main_loop_name = 'External'
if main_loop_run_cb is None:
main_loop_run_cb = clutter.main
main_loop_name = 'Clutter'
if main_loop_stop_cb is None:
main_loop_stop_cb = clutter.main_quit
def quit(*args):
logger.info('Quitting %s main loop by stage destroy', main_loop_name)
main_loop_stop_cb()
import sys
gobject.timeout_add_seconds(2, sys.exit)
stage.connect('destroy', quit)
class Quitter(easyevent.Listener):
def __init__(self):
easyevent.Listener.__init__(self)
self.register_event('wizard_quit')
def evt_wizard_quit(self, event):
logging.info('Quitting %s main loop by touchwizard button', main_loop_name)
main_loop_stop_cb()
import sys
gobject.timeout_add_seconds(2, sys.exit)
Quitter()
logger.info('Running %s main loop.', main_loop_name)
main_loop_run_cb()
if __name__ == '__main__':
quick_launch(None)
| gpl-3.0 | 398,874,673,450,197,570 | 36.093541 | 130 | 0.593215 | false |
Solomoriah/gdmodule | demo/gddemo.py | 1 | 1024 | #!/usr/bin/env python
import gd, os, cStringIO, urllib2
os.environ["GDFONTPATH"] = "."
FONT = "Pacifico"
def simple():
im = gd.image((200, 200))
white = im.colorAllocate((255, 255, 255))
black = im.colorAllocate((0, 0, 0))
red = im.colorAllocate((255, 0, 0))
blue = im.colorAllocate((0, 0, 255))
im.colorTransparent(white)
im.interlace(1)
im.rectangle((0,0),(199,199),black)
im.arc((100,100),(195,175),0,360,blue)
im.fill((100,100),red)
print im.get_bounding_rect(FONT, 12.0, 0.0, (10, 100), "Hello Python")
im.string_ttf(FONT, 20.0, 0.0, (10, 100), "Hello Python", black)
f=open("xx.png","w")
im.writePng(f)
f.close()
f=open("xx.jpg", "w")
im.writeJpeg(f,100)
f.close()
f=cStringIO.StringIO()
im.writePng(f)
print "PNG size:", len(f.getvalue())
f.close()
f = urllib2.urlopen("http://www.gnu.org/graphics/gnu-head-sm.jpg")
im = gd.image(f, "jpg")
f.close()
print "GNU Image Size:", im.size()
simple()
| bsd-3-clause | 8,312,121,099,719,976,000 | 20.787234 | 74 | 0.583008 | false |
mbreese/tabql | tabql/__init__.py | 1 | 4777 | # Copyright (c) 2014, Marcus Breese
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# * Neither the names of the authors nor contributors may not be used to endorse or
# promote products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import os
import sqlite3
import tempfile
import tab
class TabQL(object):
def __init__(self, fnames, dbfname=None, noheader=False, headercomment=False, tmpdir=None, verbose=False):
self.fnames = fnames
self.noheader = noheader
self.headercomment = headercomment
self.verbose = verbose
        if tmpdir is None:
if 'TMPDIR' in os.environ:
self.tmpdir = os.environ['TMPDIR']
elif 'TMP' in os.environ:
self.tmpdir = os.environ['TMP']
else:
self.tmpdir = '/tmp'
else:
self.tmpdir = tmpdir
if dbfname:
self.dbfname = dbfname
self._istmpdb = False
else:
tmp = tempfile.NamedTemporaryFile(prefix='.tmp', suffix='.db', dir=tmpdir)
self.dbfname = tmp.name
tmp.close()
self._istmpdb = True
self.__log('Using SQLite database: %s' % self.dbfname)
self.conn = sqlite3.connect(self.dbfname)
self.__setup()
def __log(self, msg):
if self.verbose:
sys.stderr.write('%s\n' % msg)
sys.stderr.flush()
def __setup(self):
for i, (file_type, tablename, fname) in enumerate(self.fnames):
self.__log('Importing table %s from %s' % (tablename, fname))
if file_type == '-tab':
reader = tab.TabReader(fname, noheader=self.noheader, headercomment=self.headercomment)
coldefs = ["'%s' %s" % (x,y) for x,y in zip(reader.headers, reader.coltypes)]
schema = 'CREATE TABLE %s (%s);' % (tablename, ','.join(coldefs))
if self.verbose:
sys.stderr.write('%s\n' % schema)
self.conn.execute(schema)
self.conn.commit()
buffer = []
sql = 'INSERT INTO %s (%s) VALUES (%s)' % (tablename, ','.join(["'%s'" % x for x in reader.headers]), ','.join(['?',] * len(reader.headers)))
i=0
for cols in reader.get_values():
i += 1
buffer.append(cols)
if len(buffer) > 1000:
self.conn.executemany(sql, buffer)
self.conn.commit()
buffer = []
if buffer:
self.conn.executemany(sql, buffer)
self.conn.commit()
self.__log('%s rows imported' % i)
def close(self):
if self._istmpdb:
self.__log('Removing SQLite database: %s' % self.dbfname)
os.unlink(self.dbfname)
def execute(self, query, args=()):
if not self.conn:
self.conn = sqlite3.connect(self.dbfname)
c = self.conn.cursor()
self.__log('Query: %s' % query)
try:
colnames = None
for row in c.execute(query, args):
if not colnames:
colnames = [x[0] for x in c.description]
yield (colnames, row)
except sqlite3.OperationalError, e:
sys.stderr.write('SQL Error: %s\n' % e.message)
return
c.close()
self.conn.close()
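# Example usage (a minimal sketch; the 'people' table name and 'people.tsv'
# path are hypothetical, not shipped with this package):
#
#   tq = TabQL([('-tab', 'people', 'people.tsv')], verbose=True)
#   for colnames, row in tq.execute('SELECT * FROM people WHERE age > ?', (30,)):
#       print colnames, row
#   tq.close()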
| bsd-3-clause | 3,301,811,955,980,845,600 | 35.465649 | 157 | 0.589491 | false |
jwlin/web-crawler-tutorial | ch4/google_finance_api.py | 1 | 2251 | import requests
import json
from datetime import datetime, timedelta
def get_stock(query):
    # query may contain several stocks, e.g. TPE:2330,TPE:2498, separated by commas
resp = requests.get('http://finance.google.com/finance/info?client=ig&q=' + query)
if resp.status_code == 200:
        # strip the leading // from the returned data
        # what remains is a list of dicts, one dict per stock
return json.loads(resp.text.replace('//', ''))
else:
return None
def get_stock_history(stock_id, stock_mkt):
resp = requests.get('http://www.google.com/finance/getprices?q=' + stock_id + '&x=' + stock_mkt + '&i=86400&p=1M')
''' e.g.,
EXCHANGE%3DTPE
MARKET_OPEN_MINUTE=540
MARKET_CLOSE_MINUTE=810
INTERVAL=86400
COLUMNS=DATE,CLOSE,HIGH,LOW,OPEN,VOLUME
DATA=
TIMEZONE_OFFSET=480
a1488346200,186,188.5,186,188.5,46176000
1,186,188.5,185,188,39914000
2,184,185,184,184.5,28085000
5,183.5,184.5,183.5,184,12527000
...
'''
index = -1
lines = resp.text.split('\n')
for line in lines:
        # a line starting with 'a' marks the first row of price data
if line.startswith('a'):
index = lines.index(line)
break
if index > 0:
lines = lines[index:]
        # find the date of the first row
unix_time = int(lines[0].split(',')[0][1:])
init_time = datetime.fromtimestamp(unix_time)
rows = list()
        # process the first row
first_row = lines[0].split(',')
first_row[0] = init_time
rows.append(first_row)
        # process the remaining rows
for l in lines[1:]:
if l:
row = l.split(',')
delta = int(row[0])
row[0] = init_time + timedelta(days=delta)
rows.append(row)
return rows
else:
return None
if __name__ == '__main__':
query = 'TPE:2330'
print(query, '即時股價')
stocks = get_stock(query)
print(stocks[0])
print('-----')
stock_id, stock_mkt = '2330', 'TPE'
    print(stock_mkt, stock_id, 'price history (Date, Close, High, Low, Open, Volume)')
rows = get_stock_history('2330', 'TPE')
for row in rows:
print(row[0].strftime("%Y/%m/%d"), row[1:])
| mit | -177,420,440,146,556,900 | 28.097222 | 118 | 0.556086 | false |
jodogne/OrthancMirror | OrthancServer/Resources/Samples/Python/ArchiveStudiesInTimeRange.py | 1 | 3416 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Orthanc - A Lightweight, RESTful DICOM Store
# Copyright (C) 2012-2016 Sebastien Jodogne, Medical Physics
# Department, University Hospital of Liege, Belgium
# Copyright (C) 2017-2021 Osimis S.A., Belgium
#
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import os.path
import sys
import RestToolbox
def PrintHelp():
print('Download ZIP archives for all the studies generated '
'during a given time range (according to the StudyDate tag)\n')
print('Usage: %s <URL> <StartDate> <EndDate> <TargetFolder>\n' % sys.argv[0])
print('Example: %s http://127.0.0.1:8042/ 20150101 20151231 /tmp/\n' % sys.argv[0])
exit(-1)
def CheckIsDate(date):
if len(date) != 8 or not date.isdigit():
print '"%s" is not a valid date!\n' % date
exit(-1)
if len(sys.argv) != 5:
PrintHelp()
URL = sys.argv[1]
START = sys.argv[2]
END = sys.argv[3]
TARGET = sys.argv[4]
CheckIsDate(START)
CheckIsDate(END)
def GetTag(tags, key):
if key in tags:
return tags[key]
else:
return 'No%s' % key
# Loop over the studies
for studyId in RestToolbox.DoGet('%s/studies' % URL):
# Retrieve the DICOM tags of the current study
study = RestToolbox.DoGet('%s/studies/%s' % (URL, studyId))['MainDicomTags']
# Retrieve the DICOM tags of the parent patient of this study
# Case 1: Baseline version
patient = RestToolbox.DoGet('%s/studies/%s/patient' % (URL, studyId))['MainDicomTags']
# Case 2: Tweaked version that can be used if several patients
# share the same "Patient ID", but have different "Patient Name"
# (which is invalid according to the DICOM standard).
# https://groups.google.com/d/msg/orthanc-users/58AxIkxFbZs/N6Knub8MAgAJ
# patient = RestToolbox.DoGet('%s/studies/%s' % (URL, studyId)) ['PatientMainDicomTags']
# Check that the StudyDate tag lies within the given range
studyDate = study['StudyDate'][:8]
if studyDate >= START and studyDate <= END:
# Create a filename
filename = '%s - %s %s - %s.zip' % (GetTag(study, 'StudyDate'),
GetTag(patient, 'PatientID'),
GetTag(patient, 'PatientName'),
GetTag(study, 'StudyDescription'))
# Remove any non-ASCII character in the filename
filename = filename.encode('ascii', errors = 'replace').translate(None, r"'\/:*?\"<>|!=").strip()
# Download the ZIP archive of the study
print('Downloading %s' % filename)
zipContent = RestToolbox.DoGet('%s/studies/%s/archive' % (URL, studyId))
# Write the ZIP archive at the proper location
with open(os.path.join(TARGET, filename), 'wb') as f:
f.write(zipContent)
| gpl-3.0 | -516,811,386,712,827,460 | 35.731183 | 105 | 0.650761 | false |
mjn19172/Savu | savu/plugins/denoise_bregman_filter.py | 1 | 2211 | # Copyright 2014 Diamond Light Source Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: denoise using the split bregman method
:platform: Unix
:synopsis: A plugin to denoise 2D slices of data by using the Split Bregman
to solve the Total Variation ROF model
.. moduleauthor:: Imanol Luengo <scientificsoftware@diamond.ac.uk>
"""
import logging
from skimage.restoration import denoise_tv_bregman
from savu.plugins.filter import Filter
from savu.plugins.driver.cpu_plugin import CpuPlugin
from savu.plugins.utils import register_plugin
@register_plugin
class DenoiseBregmanFilter(Filter, CpuPlugin):
"""
Split Bregman method for solving the denoising Total Variation ROF model.
:param weight: Denoising factor. Default: 2.0
:param max_iterations: Total number of iterations. Default: 100.
:param error_threshold: Convergence threshold. Default: 0.001.
:param isotropic: Isotropic or Anisotropic filtering. Default: False.
"""
def __init__(self):
logging.debug("Starting Denoise Bregman Filter")
super(DenoiseBregmanFilter, self).__init__("DenoiseBregmanFilter")
def get_filter_padding(self):
return {}
def get_max_frames(self):
return 8
def filter_frame(self, data, params):
data = data[0]
logging.debug("Running Denoise")
weight = self.parameters['weight']
max_iter = self.parameters['max_iterations']
eps = self.parameters['error_threshold']
isotropic = self.parameters['isotropic']
return denoise_tv_bregman(data[0, ...], weight, max_iter=max_iter,
eps=eps, isotropic=isotropic)
| apache-2.0 | 4,520,722,341,952,091,600 | 34.111111 | 78 | 0.699231 | false |
ColumbiaCMB/kid_readout | apps/data_taking_scripts/2017-05-jpl-lf-n1-optical/single-horn/sweep_and_stream_bb_4.py | 1 | 3439 |
from kid_readout.interactive import *
from kid_readout.equipment import hardware
from kid_readout.measurement import acquire
from kid_readout.roach import analog
from kid_readout.equipment import agilent_33220
import time
fg = agilent_33220.FunctionGenerator(addr=('192.168.1.135', 5025))
fg.set_load_ohms(1000)
fg.set_dc_voltage(0)
fg.enable_output(False)
ri = Roach2Baseband()
ri.set_modulation_output('high')
initial_f0s = np.load('/data/readout/resonances/2017-06-JPL-8x8-LF-N1_single_horn_4.npy')/1e6
nf = len(initial_f0s)
atonce = 4
if nf % atonce > 0:
print "extending list of resonators to make a multiple of ", atonce
initial_f0s = np.concatenate((initial_f0s, np.arange(1, 1 + atonce - (nf % atonce)) + initial_f0s.max()))
print len(initial_f0s)
nsamp = 2**20
offsets = np.arange(-16,16)*512./nsamp
last_f0s = initial_f0s
for heater_voltage in np.sqrt(np.linspace(0,4**2,16)):
fg.set_dc_voltage(heater_voltage)
if heater_voltage == 0:
print "heater voltage is 0 V, skipping wait"
else:
print "waiting 20 minutes", heater_voltage
time.sleep(1200)
fg.enable_output(True)
for dac_atten in [35]:
ri.set_dac_atten(dac_atten)
tic = time.time()
ncf = new_nc_file(suffix='%d_dB_load_heater_%.3f_V' % (dac_atten, heater_voltage))
swpa = acquire.run_sweep(ri, tone_banks=last_f0s[None,:] + offsets[:,None], num_tone_samples=nsamp,
length_seconds=0, verbose=True,
description='bb sweep')
print "resonance sweep done", (time.time()-tic)/60.
ncf.write(swpa)
current_f0s = []
for sidx in range(last_f0s.shape[0]):
swp = swpa.sweep(sidx)
res = swp.resonator
print res.f_0, res.Q, res.current_result.redchi, (last_f0s[sidx]*1e6-res.f_0)
if np.abs(res.f_0 - last_f0s[sidx]*1e6) > 200e3:
current_f0s.append(last_f0s[sidx]*1e6)
print "using original frequency for ",last_f0s[sidx]
else:
current_f0s.append(res.f_0)
print "fits complete", (time.time()-tic)/60.
current_f0s = np.array(current_f0s)/1e6
current_f0s.sort()
if np.any(np.diff(current_f0s)<0.031):
print "problematic resonator collision:",current_f0s
print "deltas:",np.diff(current_f0s)
problems = np.flatnonzero(np.diff(current_f0s)<0.031)+1
current_f0s[problems] = (current_f0s[problems-1] + current_f0s[problems+1])/2.0
if np.any(np.diff(current_f0s)<0.031):
print "repeated problematic resonator collision:",current_f0s
print "deltas:",np.diff(current_f0s)
problems = np.flatnonzero(np.diff(current_f0s)<0.031)+1
current_f0s[problems] = (current_f0s[problems-1] + current_f0s[problems+1])/2.0
ri.set_tone_freqs(current_f0s,nsamp)
ri.select_fft_bins(range(last_f0s.shape[0]))
last_f0s = current_f0s
raw_input("turn off compressor")
meas = ri.get_measurement(num_seconds=30.,description='stream with bb')
raw_input("turn on compressor")
ncf.write(meas)
print "dac_atten %f heater voltage %.3f V done in %.1f minutes" % (dac_atten, heater_voltage, (time.time()-tic)/60.)
ncf.close()
raw_input("check sweeps fit before going to next voltage step")
ri.set_dac_atten(20)
| bsd-2-clause | 2,862,236,436,444,174,300 | 39.458824 | 124 | 0.624891 | false |
slackapi/python-slackclient | slack_sdk/web/legacy_base_client.py | 1 | 22083 | """A Python module for interacting with Slack's Web API."""
import asyncio
import copy
import hashlib
import hmac
import io
import json
import logging
import mimetypes
import urllib
import uuid
import warnings
from http.client import HTTPResponse
from ssl import SSLContext
from typing import BinaryIO, Dict, List
from typing import Optional, Union
from urllib.error import HTTPError
from urllib.parse import urlencode
from urllib.request import Request, urlopen, OpenerDirector, ProxyHandler, HTTPSHandler
import aiohttp
from aiohttp import FormData, BasicAuth
import slack_sdk.errors as err
from slack_sdk.errors import SlackRequestError
from .async_internal_utils import _files_to_data, _get_event_loop, _request_with_session
from .deprecation import show_2020_01_deprecation
from .internal_utils import (
convert_bool_to_0_or_1,
get_user_agent,
_get_url,
_build_req_args,
)
from .legacy_slack_response import LegacySlackResponse as SlackResponse
class LegacyBaseClient:
BASE_URL = "https://www.slack.com/api/"
def __init__(
self,
token: Optional[str] = None,
base_url: str = BASE_URL,
timeout: int = 30,
loop: Optional[asyncio.AbstractEventLoop] = None,
ssl: Optional[SSLContext] = None,
proxy: Optional[str] = None,
run_async: bool = False,
use_sync_aiohttp: bool = False,
session: Optional[aiohttp.ClientSession] = None,
headers: Optional[dict] = None,
user_agent_prefix: Optional[str] = None,
user_agent_suffix: Optional[str] = None,
):
self.token = None if token is None else token.strip()
self.base_url = base_url
self.timeout = timeout
self.ssl = ssl
self.proxy = proxy
self.run_async = run_async
self.use_sync_aiohttp = use_sync_aiohttp
self.session = session
self.headers = headers or {}
self.headers["User-Agent"] = get_user_agent(
user_agent_prefix, user_agent_suffix
)
self._logger = logging.getLogger(__name__)
self._event_loop = loop
def api_call( # skipcq: PYL-R1710
self,
api_method: str,
*,
http_verb: str = "POST",
files: dict = None,
data: Union[dict, FormData] = None,
params: dict = None,
json: dict = None, # skipcq: PYL-W0621
headers: dict = None,
auth: dict = None,
) -> Union[asyncio.Future, SlackResponse]:
"""Create a request and execute the API call to Slack.
Args:
api_method (str): The target Slack API method.
e.g. 'chat.postMessage'
http_verb (str): HTTP Verb. e.g. 'POST'
files (dict): Files to multipart upload.
e.g. {image OR file: file_object OR file_path}
data: The body to attach to the request. If a dictionary is
provided, form-encoding will take place.
e.g. {'key1': 'value1', 'key2': 'value2'}
params (dict): The URL parameters to append to the URL.
e.g. {'key1': 'value1', 'key2': 'value2'}
json (dict): JSON for the body to attach to the request
(if files or data is not specified).
e.g. {'key1': 'value1', 'key2': 'value2'}
headers (dict): Additional request headers
auth (dict): A dictionary that consists of client_id and client_secret
Returns:
(SlackResponse)
The server's response to an HTTP request. Data
from the response can be accessed like a dict.
If the response included 'next_cursor' it can
be iterated on to execute subsequent requests.
Raises:
SlackApiError: The following Slack API call failed:
'chat.postMessage'.
SlackRequestError: Json data can only be submitted as
POST requests.
"""
api_url = _get_url(self.base_url, api_method)
        headers = headers or {}
        headers.update(self.headers)
        if isinstance(auth, dict):
            auth = BasicAuth(auth["client_id"], auth["client_secret"])
        elif isinstance(auth, BasicAuth):
            headers["Authorization"] = auth.encode()
req_args = _build_req_args(
token=self.token,
http_verb=http_verb,
files=files,
data=data,
params=params,
json=json, # skipcq: PYL-W0621
headers=headers,
auth=auth,
ssl=self.ssl,
proxy=self.proxy,
)
show_2020_01_deprecation(api_method)
if self.run_async or self.use_sync_aiohttp:
if self._event_loop is None:
self._event_loop = _get_event_loop()
future = asyncio.ensure_future(
self._send(http_verb=http_verb, api_url=api_url, req_args=req_args),
loop=self._event_loop,
)
if self.run_async:
return future
if self.use_sync_aiohttp:
# Using this is no longer recommended - just keep this for backward-compatibility
return self._event_loop.run_until_complete(future)
else:
return self._sync_send(api_url=api_url, req_args=req_args)
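    # Typical synchronous call (a sketch; the token and channel values are
    # placeholders):
    #
    #   client = LegacyBaseClient(token="xoxb-...")
    #   resp = client.api_call("chat.postMessage",
    #                          json={"channel": "#random", "text": "hello"})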
# =================================================================
# aiohttp based async WebClient
# =================================================================
async def _send(
self, http_verb: str, api_url: str, req_args: dict
) -> SlackResponse:
"""Sends the request out for transmission.
Args:
http_verb (str): The HTTP verb. e.g. 'GET' or 'POST'.
api_url (str): The Slack API url. e.g. 'https://slack.com/api/chat.postMessage'
req_args (dict): The request arguments to be attached to the request.
e.g.
{
json: {
'attachments': [{"pretext": "pre-hello", "text": "text-world"}],
'channel': '#random'
}
}
Returns:
The response parsed into a SlackResponse object.
"""
open_files = _files_to_data(req_args)
try:
if "params" in req_args:
# True/False -> "1"/"0"
req_args["params"] = convert_bool_to_0_or_1(req_args["params"])
res = await self._request(
http_verb=http_verb, api_url=api_url, req_args=req_args
)
finally:
for f in open_files:
f.close()
data = {
"client": self,
"http_verb": http_verb,
"api_url": api_url,
"req_args": req_args,
"use_sync_aiohttp": self.use_sync_aiohttp,
}
return SlackResponse(**{**data, **res}).validate()
async def _request(self, *, http_verb, api_url, req_args) -> Dict[str, any]:
"""Submit the HTTP request with the running session or a new session.
Returns:
A dictionary of the response data.
"""
return await _request_with_session(
current_session=self.session,
timeout=self.timeout,
logger=self._logger,
http_verb=http_verb,
api_url=api_url,
req_args=req_args,
)
# =================================================================
# urllib based WebClient
# =================================================================
def _sync_send(self, api_url, req_args) -> SlackResponse:
params = req_args["params"] if "params" in req_args else None
data = req_args["data"] if "data" in req_args else None
files = req_args["files"] if "files" in req_args else None
_json = req_args["json"] if "json" in req_args else None
headers = req_args["headers"] if "headers" in req_args else None
token = params.get("token") if params and "token" in params else None
auth = (
req_args["auth"] if "auth" in req_args else None
) # Basic Auth for oauth.v2.access / oauth.access
if auth is not None:
if isinstance(auth, BasicAuth):
headers["Authorization"] = auth.encode()
elif isinstance(auth, str):
headers["Authorization"] = auth
else:
self._logger.warning(
f"As the auth: {auth}: {type(auth)} is unsupported, skipped"
)
body_params = {}
if params:
body_params.update(params)
if data:
body_params.update(data)
return self._urllib_api_call(
token=token,
url=api_url,
query_params={},
body_params=body_params,
files=files,
json_body=_json,
additional_headers=headers,
)
def _request_for_pagination(self, api_url, req_args) -> Dict[str, any]:
"""This method is supposed to be used only for SlackResponse pagination
You can paginate using Python's for iterator as below:
for response in client.conversations_list(limit=100):
# do something with each response here
"""
response = self._perform_urllib_http_request(url=api_url, args=req_args)
return {
"status_code": int(response["status"]),
"headers": dict(response["headers"]),
"data": json.loads(response["body"]),
}
def _urllib_api_call(
self,
*,
token: str = None,
url: str,
query_params: Dict[str, str] = {},
json_body: Dict = {},
body_params: Dict[str, str] = {},
files: Dict[str, io.BytesIO] = {},
additional_headers: Dict[str, str] = {},
) -> SlackResponse:
"""Performs a Slack API request and returns the result.
:param token: Slack API Token (either bot token or user token)
:param url: a complete URL (e.g., https://www.slack.com/api/chat.postMessage)
:param query_params: query string
:param json_body: json data structure (it's still a dict at this point),
if you give this argument, body_params and files will be skipped
:param body_params: form params
:param files: files to upload
:param additional_headers: request headers to append
:return: API response
"""
files_to_close: List[BinaryIO] = []
try:
# True/False -> "1"/"0"
query_params = convert_bool_to_0_or_1(query_params)
body_params = convert_bool_to_0_or_1(body_params)
if self._logger.level <= logging.DEBUG:
def convert_params(values: dict) -> dict:
if not values or not isinstance(values, dict):
return {}
return {
k: ("(bytes)" if isinstance(v, bytes) else v)
for k, v in values.items()
}
headers = {
k: "(redacted)" if k.lower() == "authorization" else v
for k, v in additional_headers.items()
}
self._logger.debug(
f"Sending a request - url: {url}, "
f"query_params: {convert_params(query_params)}, "
f"body_params: {convert_params(body_params)}, "
f"files: {convert_params(files)}, "
f"json_body: {json_body}, "
f"headers: {headers}"
)
request_data = {}
if files is not None and isinstance(files, dict) and len(files) > 0:
if body_params:
for k, v in body_params.items():
request_data.update({k: v})
for k, v in files.items():
if isinstance(v, str):
f: BinaryIO = open(v.encode("utf-8", "ignore"), "rb")
files_to_close.append(f)
request_data.update({k: f})
elif isinstance(v, (bytearray, bytes)):
request_data.update({k: io.BytesIO(v)})
else:
request_data.update({k: v})
request_headers = self._build_urllib_request_headers(
token=token or self.token,
                has_json=json_body is not None,
has_files=files is not None,
additional_headers=additional_headers,
)
request_args = {
"headers": request_headers,
"data": request_data,
"params": body_params,
"files": files,
"json": json_body,
}
if query_params:
q = urlencode(query_params)
url = f"{url}&{q}" if "?" in url else f"{url}?{q}"
response = self._perform_urllib_http_request(url=url, args=request_args)
if response.get("body", None): # skipcq: PTC-W0039
try:
response_body_data: dict = json.loads(response["body"])
except json.decoder.JSONDecodeError as e:
message = f"Failed to parse the response body: {str(e)}"
raise err.SlackApiError(message, response)
else:
response_body_data: dict = None
if query_params:
all_params = copy.copy(body_params)
all_params.update(query_params)
else:
all_params = body_params
request_args["params"] = all_params # for backward-compatibility
return SlackResponse(
client=self,
http_verb="POST", # you can use POST method for all the Web APIs
api_url=url,
req_args=request_args,
data=response_body_data,
headers=dict(response["headers"]),
status_code=response["status"],
use_sync_aiohttp=False,
).validate()
finally:
for f in files_to_close:
if not f.closed:
f.close()
def _perform_urllib_http_request(
self, *, url: str, args: Dict[str, Dict[str, any]]
) -> Dict[str, any]:
"""Performs an HTTP request and parses the response.
:param url: a complete URL (e.g., https://www.slack.com/api/chat.postMessage)
:param args: args has "headers", "data", "params", and "json"
"headers": Dict[str, str]
"data": Dict[str, any]
"params": Dict[str, str],
"json": Dict[str, any],
:return: dict {status: int, headers: Headers, body: str}
"""
headers = args["headers"]
if args["json"]:
body = json.dumps(args["json"])
headers["Content-Type"] = "application/json;charset=utf-8"
elif args["data"]:
boundary = f"--------------{uuid.uuid4()}"
sep_boundary = b"\r\n--" + boundary.encode("ascii")
end_boundary = sep_boundary + b"--\r\n"
body = io.BytesIO()
data = args["data"]
for key, value in data.items():
readable = getattr(value, "readable", None)
if readable and value.readable():
filename = "Uploaded file"
name_attr = getattr(value, "name", None)
if name_attr:
filename = (
name_attr.decode("utf-8")
if isinstance(name_attr, bytes)
else name_attr
)
if "filename" in data:
filename = data["filename"]
mimetype = (
mimetypes.guess_type(filename)[0] or "application/octet-stream"
)
title = (
f'\r\nContent-Disposition: form-data; name="{key}"; filename="{filename}"\r\n'
+ f"Content-Type: {mimetype}\r\n"
)
value = value.read()
else:
title = f'\r\nContent-Disposition: form-data; name="{key}"\r\n'
value = str(value).encode("utf-8")
body.write(sep_boundary)
body.write(title.encode("utf-8"))
body.write(b"\r\n")
body.write(value)
body.write(end_boundary)
body = body.getvalue()
headers["Content-Type"] = f"multipart/form-data; boundary={boundary}"
headers["Content-Length"] = len(body)
elif args["params"]:
body = urlencode(args["params"])
headers["Content-Type"] = "application/x-www-form-urlencoded"
else:
body = None
if isinstance(body, str):
body = body.encode("utf-8")
# NOTE: Intentionally ignore the `http_verb` here
# Slack APIs accepts any API method requests with POST methods
try:
# urllib not only opens http:// or https:// URLs, but also ftp:// and file://.
# With this it might be possible to open local files on the executing machine
# which might be a security risk if the URL to open can be manipulated by an external user.
# (BAN-B310)
if url.lower().startswith("http"):
req = Request(method="POST", url=url, data=body, headers=headers)
opener: Optional[OpenerDirector] = None
if self.proxy is not None:
if isinstance(self.proxy, str):
opener = urllib.request.build_opener(
ProxyHandler({"http": self.proxy, "https": self.proxy}),
HTTPSHandler(context=self.ssl),
)
else:
raise SlackRequestError(
f"Invalid proxy detected: {self.proxy} must be a str value"
)
# NOTE: BAN-B310 is already checked above
resp: Optional[HTTPResponse] = None
if opener:
resp = opener.open(req, timeout=self.timeout) # skipcq: BAN-B310
else:
resp = urlopen( # skipcq: BAN-B310
req, context=self.ssl, timeout=self.timeout
)
charset = resp.headers.get_content_charset() or "utf-8"
body: str = resp.read().decode(charset) # read the response body here
return {"status": resp.code, "headers": resp.headers, "body": body}
raise SlackRequestError(f"Invalid URL detected: {url}")
except HTTPError as e:
resp = {"status": e.code, "headers": e.headers}
if e.code == 429:
# for compatibility with aiohttp
resp["headers"]["Retry-After"] = resp["headers"]["retry-after"]
# read the response body here
charset = e.headers.get_content_charset() or "utf-8"
body: str = e.read().decode(charset)
resp["body"] = body
return resp
except Exception as err:
self._logger.error(f"Failed to send a request to Slack API server: {err}")
raise err
def _build_urllib_request_headers(
self, token: str, has_json: bool, has_files: bool, additional_headers: dict
) -> Dict[str, str]:
headers = {"Content-Type": "application/x-www-form-urlencoded"}
headers.update(self.headers)
if token:
headers.update({"Authorization": "Bearer {}".format(token)})
if additional_headers:
headers.update(additional_headers)
if has_json:
headers.update({"Content-Type": "application/json;charset=utf-8"})
if has_files:
# will be set afterwards
headers.pop("Content-Type", None)
return headers
# =================================================================
@staticmethod
def validate_slack_signature(
*, signing_secret: str, data: str, timestamp: str, signature: str
) -> bool:
"""
Slack creates a unique string for your app and shares it with you. Verify
requests from Slack with confidence by verifying signatures using your
signing secret.
On each HTTP request that Slack sends, we add an X-Slack-Signature HTTP
header. The signature is created by combining the signing secret with the
body of the request we're sending using a standard HMAC-SHA256 keyed hash.
https://api.slack.com/docs/verifying-requests-from-slack#how_to_make_a_request_signature_in_4_easy_steps__an_overview
Args:
signing_secret: Your application's signing secret, available in the
Slack API dashboard
data: The raw body of the incoming request - no headers, just the body.
timestamp: from the 'X-Slack-Request-Timestamp' header
signature: from the 'X-Slack-Signature' header - the calculated signature
should match this.
Returns:
True if signatures matches
"""
warnings.warn(
"As this method is deprecated since slackclient 2.6.0, "
"use `from slack.signature import SignatureVerifier` instead",
DeprecationWarning,
)
format_req = str.encode(f"v0:{timestamp}:{data}")
encoded_secret = str.encode(signing_secret)
request_hash = hmac.new(encoded_secret, format_req, hashlib.sha256).hexdigest()
calculated_signature = f"v0={request_hash}"
return hmac.compare_digest(calculated_signature, signature)
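    # Example (a sketch; the secret and header values are placeholders taken
    # from an incoming HTTP request):
    #
    #   valid = LegacyBaseClient.validate_slack_signature(
    #       signing_secret=my_signing_secret,
    #       data=request_body_string,
    #       timestamp=headers["X-Slack-Request-Timestamp"],
    #       signature=headers["X-Slack-Signature"],
    #   )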
| mit | -1,541,434,596,766,754,300 | 39.743542 | 125 | 0.525744 | false |
akhmadMizkat/odoo | addons/base_setup/res_config.py | 1 | 3628 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.osv import fields, osv
class base_config_settings(osv.osv_memory):
_name = 'base.config.settings'
_inherit = 'res.config.settings'
_columns = {
'group_multi_company': fields.boolean('Manage multiple companies',
help='Work in multi-company environments, with appropriate security access between companies.',
implied_group='base.group_multi_company'),
'module_share': fields.boolean('Allow documents sharing',
help="""Share or embbed any screen of Odoo."""),
'module_portal': fields.boolean('Activate the customer portal',
help="""Give your customers access to their documents."""),
'module_auth_oauth': fields.boolean('Use external authentication providers (OAuth)'),
'module_base_import': fields.boolean("Allow users to import data from CSV/XLS/XLSX/ODS files"),
'module_google_drive': fields.boolean('Attach Google documents to any record',
help="""This installs the module google_docs."""),
'module_google_calendar': fields.boolean('Allow the users to synchronize their calendar with Google Calendar',
help="""This installs the module google_calendar."""),
'module_inter_company_rules': fields.boolean('Manage Inter Company',
help="""This installs the module inter_company_rules.\n Configure company rules to automatically create SO/PO when one of your company sells/buys to another of your company."""),
'company_share_partner': fields.boolean('Share partners to all companies',
help="Share your partners to all companies defined in your instance.\n"
" * Checked : Partners are visible for every companies, even if a company is defined on the partner.\n"
" * Unchecked : Each company can see only its partner (partners where company is defined). Partners not related to a company are visible for all companies."),
'group_multi_currency': fields.boolean('Allow multi currencies',
implied_group='base.group_multi_currency',
help="Allows to work in a multi currency environment"),
}
def open_company(self, cr, uid, ids, context=None):
user = self.pool.get('res.users').browse(cr, uid, uid, context)
return {
'type': 'ir.actions.act_window',
'name': 'My Company',
'view_type': 'form',
'view_mode': 'form',
'res_model': 'res.company',
'res_id': user.company_id.id,
'target': 'current',
}
def get_default_company_share_partner(self, cr, uid, fields, context=None):
partner_rule = self.pool['ir.model.data'].xmlid_to_object(cr, uid, 'base.res_partner_rule', context=context)
return {
'company_share_partner': not bool(partner_rule.active)
}
def set_default_company_share_partner(self, cr, uid, ids, context=None):
partner_rule = self.pool['ir.model.data'].xmlid_to_object(cr, uid, 'base.res_partner_rule', context=context)
for wizard in self.browse(cr, uid, ids, context=context):
self.pool['ir.rule'].write(cr, uid, [partner_rule.id], {'active': not bool(wizard.company_share_partner)}, context=context)
# Empty class but required since it's overridden by sale & crm
class sale_config_settings(osv.osv_memory):
_name = 'sale.config.settings'
_inherit = 'res.config.settings'
_columns = {
}
| gpl-3.0 | -6,858,198,202,764,836,000 | 53.149254 | 190 | 0.63699 | false |
bsgbryan/Ardus | node_modules/microtime/build/c4che/default.cache.py | 1 | 1405 | AR = '/usr/bin/ar'
ARFLAGS = 'rcs'
CCFLAGS = ['-g']
CCFLAGS_MACBUNDLE = ['-fPIC']
CCFLAGS_NODE = ['-D_LARGEFILE_SOURCE', '-D_FILE_OFFSET_BITS=64']
CC_VERSION = ('4', '2', '1')
COMPILER_CXX = 'g++'
CPP = '/usr/bin/cpp'
CPPFLAGS_NODE = ['-D_GNU_SOURCE', '-DEV_MULTIPLICITY=0']
CPPPATH_NODE = '/usr/local/include/node'
CPPPATH_ST = '-I%s'
CXX = ['/usr/bin/g++']
CXXDEFINES_ST = '-D%s'
CXXFLAGS = ['-g']
CXXFLAGS_DEBUG = ['-g']
CXXFLAGS_NODE = ['-D_LARGEFILE_SOURCE', '-D_FILE_OFFSET_BITS=64']
CXXFLAGS_RELEASE = ['-O2']
CXXLNK_SRC_F = ''
CXXLNK_TGT_F = ['-o', '']
CXX_NAME = 'gcc'
CXX_SRC_F = ''
CXX_TGT_F = ['-c', '-o', '']
DEST_CPU = 'x86_64'
DEST_OS = 'darwin'
FULLSTATIC_MARKER = '-static'
LIBDIR = '/Users/maynardb/.node_libraries'
LIBPATH_NODE = '/usr/local/lib'
LIBPATH_ST = '-L%s'
LIB_ST = '-l%s'
LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
LINK_CXX = ['/usr/bin/g++']
NODE_PATH = '/Users/maynardb/.node_libraries'
PREFIX = '/usr/local'
PREFIX_NODE = '/usr/local'
RANLIB = '/usr/bin/ranlib'
RPATH_ST = '-Wl,-rpath,%s'
SHLIB_MARKER = ''
SONAME_ST = ''
STATICLIBPATH_ST = '-L%s'
STATICLIB_MARKER = ''
STATICLIB_ST = '-l%s'
macbundle_PATTERN = '%s.bundle'
program_PATTERN = '%s'
shlib_CXXFLAGS = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
shlib_LINKFLAGS = ['-dynamiclib']
shlib_PATTERN = 'lib%s.dylib'
staticlib_LINKFLAGS = []
staticlib_PATTERN = 'lib%s.a'
| mit | -3,397,733,078,171,443,700 | 28.270833 | 82 | 0.625623 | false |
bollu/polymage | sandbox/apps/python/img_proc/interpolate/init.py | 2 | 2104 | import sys
import os.path
from PIL import Image
import numpy as np
from arg_parser import parse_args
from printer import print_header, print_usage, print_line
def init_images(app_data):
print("[init.py] : initializing images...")
app_args = app_data['app_args']
# input image:
img_path = app_args.img_file
image = np.array(Image.open(img_path))
img_path2 = app_args.alpha_file
alpha = np.array(Image.open(img_path2))
if image.shape[0] != alpha.shape[0] or image.shape[1] != alpha.shape[1]:
print("Please use an alpha image with the same shape as the input image")
sys.exit(1)
R = image.shape[0]
C = image.shape[1]
image_flip = np.rollaxis(image, 2)
# add alpha channel to image along with other colour channels
imgalpha = np.append(image_flip, alpha)
imgalpha = imgalpha.reshape(4, R, C)
imgalpha_region = imgalpha[0:4, 0:R, 0:C]
# add ghost region
imgalpha_ghost = np.empty((4, R+2, C+2), np.float32)
imgalpha_ghost[0:4, 1:R+1, 1:C+1] = imgalpha_region
# convert input image to floating point
imgalpha_f = np.float32(imgalpha_ghost) / 255.0
# result array
res = np.empty((3, R, C), np.float32)
img_data = {}
img_data['IN'] = imgalpha_f
img_data['OUT'] = res
app_data['img_data'] = img_data
app_data['R'] = R
app_data['C'] = C
return
def get_input(app_data):
# parse the command-line arguments
app_args = parse_args()
app_data['app_args'] = app_args
app_data['mode'] = app_args.mode
app_data['runs'] = int(app_args.runs)
app_data['graph_gen'] = bool(app_args.graph_gen)
app_data['timer'] = app_args.timer
# storage optimization
app_data['optimize_storage'] = bool(app_args.optimize_storage)
# early freeing of allocated arrays
app_data['early_free'] = bool(app_args.early_free)
# pool allocate option
app_data['pool_alloc'] = bool(app_args.pool_alloc)
return
def init_all(app_data):
pipe_data = {}
app_data['pipe_data'] = pipe_data
get_input(app_data)
init_images(app_data)
return
| apache-2.0 | -8,058,180,493,064,463,000 | 24.658537 | 76 | 0.63308 | false |
mbohlool/client-python | kubernetes/client/models/v1beta2_deployment_strategy.py | 1 | 4295 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta2DeploymentStrategy(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'rolling_update': 'V1beta2RollingUpdateDeployment',
'type': 'str'
}
attribute_map = {
'rolling_update': 'rollingUpdate',
'type': 'type'
}
def __init__(self, rolling_update=None, type=None):
"""
V1beta2DeploymentStrategy - a model defined in Swagger
"""
self._rolling_update = None
self._type = None
self.discriminator = None
if rolling_update is not None:
self.rolling_update = rolling_update
if type is not None:
self.type = type
@property
def rolling_update(self):
"""
Gets the rolling_update of this V1beta2DeploymentStrategy.
Rolling update config params. Present only if DeploymentStrategyType = RollingUpdate.
:return: The rolling_update of this V1beta2DeploymentStrategy.
:rtype: V1beta2RollingUpdateDeployment
"""
return self._rolling_update
@rolling_update.setter
def rolling_update(self, rolling_update):
"""
Sets the rolling_update of this V1beta2DeploymentStrategy.
Rolling update config params. Present only if DeploymentStrategyType = RollingUpdate.
:param rolling_update: The rolling_update of this V1beta2DeploymentStrategy.
:type: V1beta2RollingUpdateDeployment
"""
self._rolling_update = rolling_update
@property
def type(self):
"""
Gets the type of this V1beta2DeploymentStrategy.
Type of deployment. Can be \"Recreate\" or \"RollingUpdate\". Default is RollingUpdate.
:return: The type of this V1beta2DeploymentStrategy.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""
Sets the type of this V1beta2DeploymentStrategy.
Type of deployment. Can be \"Recreate\" or \"RollingUpdate\". Default is RollingUpdate.
:param type: The type of this V1beta2DeploymentStrategy.
:type: str
"""
self._type = type
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta2DeploymentStrategy):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| apache-2.0 | -6,132,782,709,722,817,000 | 26.88961 | 105 | 0.570664 | false |
wangtianqi1993/machine-learning-project | DeepLearning/tensorflow_test.py | 1 | 1477 | # !/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'wtq'
import tensorflow as tf
from data_set.tensorflow_data import mnist_input_data
mnist = mnist_input_data.read_data_sets("MNIST_data/", one_hot=True)
def tensor_flow_test():
"""
:return:
"""
matrix1 = tf.constant([[3, 3]])
matrix2 = tf.constant([[2], [2]])
product = tf.matmul(matrix1, matrix2)
sess = tf.Session()
result = sess.run(product)
print result
sess.close()
def mnist_first():
"""
A simple MNIST softmax-regression test.
:return:
"""
x = tf.placeholder("float", [None, 784])
w = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.nn.softmax(tf.matmul(x, w) + b)
y_ = tf.placeholder("float", [None, 10])
cross_entropy = -tf.reduce_sum(y_*tf.log(y))
train_step = tf.train.GradientDescentOptimizer(0.01).minimize(cross_entropy)
init = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init)
# start training model
for i in range(1000):
batch_xs, batch_ys = mnist.train.next_batch(100)
sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
# test model
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
print sess.run(accuracy, feed_dict={x: mnist.test.images, y_: mnist.test.labels})
if __name__ == '__main__':
# tensor_flow_test()
mnist_first()
| gpl-3.0 | -4,340,738,443,261,843,500 | 24.912281 | 85 | 0.608666 | false |
csb-toolbox/CSB | csb/test/cases/statistics/__init__.py | 1 | 1408 |
import numpy
import numpy.random
import csb.test as test
from csb.statistics import Cumulative
from csb.statistics import kurtosis, skewness, autocorrelation
@test.functional
class TestStatFunction(test.Case):
def testCumulative(self):
from scipy.stats import norm
x = numpy.linspace(-5., 5., 200)
samples = numpy.random.normal(size=100000)
cumula = Cumulative(samples)
c = cumula(x)
cx = norm.cdf(x)
for i in range(199):
self.assertAlmostEqual(cx[i], c[i], delta=1e-2)
def testKurtosis(self):
samples = numpy.random.normal(size=100000)
self.assertAlmostEqual(kurtosis(samples), 0., delta=1e-1)
samples = numpy.random.uniform(-2., 2., size=100000)
self.assertAlmostEqual(kurtosis(samples), -1.2, delta=1e-1)
def testSkewness(self):
samples = numpy.random.gamma(2., 0.5, size=100000)
self.assertAlmostEqual(skewness(samples), 2. / numpy.sqrt(2.), delta=1e-1)
def testAutocorrelation(self):
x = numpy.random.normal(size=1000) + numpy.sin(numpy.linspace(0., 2 * numpy.pi, 1000))
n = 10
ac = autocorrelation(x, n)
self.assertAlmostEqual(ac[0], 1., delta=1e-1)
def testEntropy(self):
pass
def testCircvar(self):
pass
def testCircmean(self):
pass
| mit | -3,661,153,303,134,718,000 | 23.701754 | 94 | 0.613636 | false |
nojero/pod | src/pod/equivalence.py | 1 | 7197 |
import sat
import ptnet
import z3
from util import *
from encoding import *
class MergingEquivalence :
def __init__ (self, domain) :
self.domain = domain
def is_in_domain (self, it) :
for x in it :
if x not in self.domain :
raise LookupError, "'%s' is not in the domain" % repr (x)
def are_merged (self, x, y) :
self.is_in_domain ([x, y])
return x == y
def class_of (self, x) :
self.is_in_domain ([x])
return [x]
def classes (self) :
return [[x] for x in self.domain]
def assert_is_equivalence (self) :
# we assert that
# - every class is disjoint from any other class
# - every element of the domain is in at least one class
# to do it we just iterate through all elements of all classes, and
# watch whether we see the same element twice, checking at the end
# that we saw all elements of the domain
e2c = {}
for c in self.classes () :
for e in c :
if e in e2c :
# already seen!
raise AssertionError, \
"Element '%s' is two classes, %s and %s" % \
(repr (e), long_list (c, 5), long_list (e2c[e], 5))
e2c[e] = c
seen = set (e2c.keys ())
if not self.domain <= seen :
print 'seen', seen
print 'domain', self.domain
raise AssertionError, \
"The set of classes contains less elements than the domain!"
if not seen <= self.domain :
print 'seen', seen
print 'domain', self.domain
raise AssertionError, \
"The set of classes contains more elements than the domain!"
def __repr__ (self) :
return str (self.classes ())
def __str__ (self) :
return repr (self)
class Smt2MergingEquivalence (MergingEquivalence) :
def __init__ (self, domain, enc) :
MergingEquivalence.__init__ (self, domain)
self.enc = enc
self.model = enc.z3.model ()
def are_merged (self, x, y) :
self.is_in_domain ([x, y])
if isinstance (x, ptnet.Condition) :
assert (isinstance (y, ptnet.Condition))
vx = self.enc.smt_varmap (x)
vy = self.enc.smt_varmap (y)
# if we didn't generate variable for one of them
# then it is surely possible to have one that
# has the same value as the other, ie, we merge
if (vx == None or vy == None) : return True
return self.model[vx].as_long () == self.model[vy].as_long ()
else :
assert (isinstance (x, ptnet.Event))
assert (isinstance (y, ptnet.Event))
if x.label != y.label : return False
vx = self.enc.smt_varmap (x)
vy = self.enc.smt_varmap (y)
assert (vx != None)
assert (vy != None)
return self.model[vx].as_long () == self.model[vy].as_long ()
def class_of (self, x) :
raise RuntimeError
def classes (self) :
raise RuntimeError
def __str__ (self) :
return str (self.model)
class ComputedMergingEquivalence (MergingEquivalence) :
def __init__ (self, domain) :
MergingEquivalence.__init__ (self, domain)
self.__tag2class = {}
self.__class2tags = {}
self.__mem2class = {}
def __merge_classes (self, c1, c2) :
# optimization: merge the smaller one into the larger one :)
if id (c1) == id (c2) : return
if len (c2) > len (c1) :
c = c1
c1 = c2
c2 = c
# move all elements of c2 into c1
c1.update (c2)
# update the pointer of all members of c2 in mem2class to point to c1
for e in c2 :
self.__mem2class[e] = c1
# same for the tags, all tags pointing to c2 must now point to c1
tagsc2 = self.__class2tags[id(c2)]
for tag in tagsc2 :
self.__tag2class[tag] = c1
# all tags of c2 are now tags of c1
self.__class2tags[id(c1)].update (tagsc2)
del self.__class2tags[id(c2)]
return c1
def tag_class (self, x, tag) :
# find x's class, or create a new one
self.is_in_domain ([x])
try :
c = self.__mem2class[x]
except KeyError :
c = self.__mem2class[x] = set ([x])
self.__class2tags[id(c)] = set ()
# if the tag is new and unknown, update the tables
if tag not in self.__tag2class :
self.__tag2class[tag] = c
self.__class2tags[id(c)].add (tag)
else :
# if it is not new, it already pointed to some class and we
# need to merge x's class and that class
c = self.__merge_classes (c, self.__tag2class[tag])
return c
def __memb_is_known (self, it) :
for x in it :
if x not in self.__mem2class :
raise LookupError, "No equivalence class defined for '%s'" % repr (x)
def __tag_is_known (self, it) :
for tag in it :
if tag not in self.__tag2class :
raise LookupError, "No equivalence class defined for tag '%s'" % repr (tag)
def are_merged (self, x, y) :
self.is_in_domain ([x, y])
self.__memb_is_known ([x, y])
if id (x) == id (y) : return True
return id (self.__mem2class[x]) == id (self.__mem2class[y])
def class_of (self, x ) :
return self.class_of_member (x)
def class_of_member (self, x) :
self.is_in_domain ([x])
self.__memb_is_known ([x])
return self.__mem2class[x]
def class_of_tag (self, tag) :
self.__tag_is_known ([tag])
return self.__tag2class[tag]
def classes (self) :
return list (set (tuple (x) for x in self.__tag2class.values ()))
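# Illustrative, hedged usage sketch added for clarity; the element names and
# tags below are hypothetical and not part of the original module.
def _demo_computed_merging_equivalence () :
    eq = ComputedMergingEquivalence (set (['a', 'b', 'c']))
    eq.tag_class ('a', 1)   # 'a' starts its own class, reachable via tag 1
    eq.tag_class ('b', 1)   # tag 1 already maps to a class, so 'b' is merged in
    eq.tag_class ('c', 2)   # 'c' gets an independent class via tag 2
    assert eq.are_merged ('a', 'b')
    assert not eq.are_merged ('a', 'c')
    return eq.classes ()    # e.g. [('a', 'b'), ('c',)], up to ordering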
class Santi2MergingEquivalence (MergingEquivalence) :
def __init__ (self, domain, model) :
MergingEquivalence.__init__ (self, domain)
self.model = model
def are_merged (self, x, y) :
self.is_in_domain ([x, y])
return self.model[z3.Int(repr(x))].as_long() == self.model[z3.Int(repr(y))].as_long()
def class_of (self, x ) :
# when asked for the class of something, just return it
return [self.model[z3.Int(repr(x))].as_long()]
def classes (self) :
# separate the conditions from the events
dt_c = {}
dt_e = {}
for x in self.domain:
# Dirty, this should be done more cleanly
# a condition looks like 'c14', an event like '2:e8'
# anyway, this only holds for this example
if 'c' in repr(x):
clase = dt_c.setdefault(self.model[z3.Int(repr(x))].as_long(),[])
clase.append(x)
else:
clase = dt_e.setdefault(self.model[z3.Int(repr(x))].as_long(),[])
clase.append(x)
# return all the classes
return dt_e.values() + dt_c.values()
class IdentityMergingEquivalence (MergingEquivalence) :
pass
# vi:ts=4:sw=4:et:
| gpl-3.0 | 4,782,201,330,291,516,000 | 32.474419 | 93 | 0.530777 | false |
onshape-public/onshape-clients | python/onshape_client/oas/models/bt_default_unit_info.py | 1 | 4695 | # coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
class BTDefaultUnitInfo(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {}
validations = {}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
This must be a class method so a model may have properties that are
of type self, this ensures that we don't create a cyclic import
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"key": (str,), # noqa: E501
"value": (str,), # noqa: E501
}
@staticmethod
def discriminator():
return None
attribute_map = {
"key": "key", # noqa: E501
"value": "value", # noqa: E501
}
@staticmethod
def _composed_schemas():
return None
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
]
)
def __init__(
self,
_check_type=True,
_from_server=False,
_path_to_item=(),
_configuration=None,
**kwargs
): # noqa: E501
"""bt_default_unit_info.BTDefaultUnitInfo - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_from_server (bool): True if the data is from the server
False if the data is from the client (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
key (str): [optional] # noqa: E501
value (str): [optional] # noqa: E501
"""
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
for var_name, var_value in six.iteritems(kwargs):
if (
var_name not in self.attribute_map
and self._configuration is not None
and self._configuration.discard_unknown_keys
and self.additional_properties_type is None
):
# discard variable.
continue
setattr(self, var_name, var_value)
| mit | 1,394,035,064,264,031,000 | 31.157534 | 79 | 0.570394 | false |
facebookresearch/faiss | benchs/bench_index_flat.py | 1 | 2187 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import time
import os
import numpy as np
import faiss
from faiss.contrib.datasets import SyntheticDataset
os.system("grep -m1 'model name' < /proc/cpuinfo")
def format_tab(x):
return "\n".join("\t".join("%g" % xi for xi in row) for row in x)
faiss.cvar.distance_compute_min_k_reservoir = 5
# for have_threads in True, False:
for have_threads in False, :
if have_threads:
# good config for Intel(R) Xeon(R) CPU E5-2698 v4 @ 2.20GHz
nthread = 32
else:
nthread = 1
faiss.omp_set_num_threads(nthread)
print("************ nthread=", nthread)
for nq in 100, 10000:
print("*********** nq=", nq)
if nq == 100:
nrun = 500
unit = "ms"
else:
nrun = 20
unit = "s"
restab = []
for d in 16, 32, 64, 128:
print("========== d=", d)
nb = 10000
# d = 32
ds = SyntheticDataset(d, 0, nb, nq)
print(ds)
index = faiss.IndexFlatL2(d)
index.add(ds.get_database())
nrun = 10
restab1 = []
restab.append(restab1)
for k in 1, 10, 100:
times = []
for run in range(nrun):
t0 = time.time()
index.search(ds.get_queries(), k)
t1 = time.time()
if run >= nrun // 5: # the rest is considered warmup
times.append((t1 - t0))
times = np.array(times)
if unit == "ms":
times *= 1000
print("search k=%3d t=%.3f ms (± %.4f)" % (
k, np.mean(times), np.std(times)))
else:
print("search k=%3d t=%.3f s (± %.4f)" % (
k, np.mean(times), np.std(times)))
restab1.append(np.mean(times))
print("restab=\n", format_tab(restab))
| mit | -6,713,802,029,258,484,000 | 24.114943 | 72 | 0.474142 | false |
dit/dit | dit/inference/counts.py | 1 | 6393 | """
Non-cython methods for getting counts and distributions from data.
"""
import numpy as np
__all__ = (
'counts_from_data',
'distribution_from_data',
'get_counts',
)
try: # cython
from .pycounts import counts_from_data, distribution_from_data
except ImportError: # no cython
from boltons.iterutils import windowed_iter
from collections import Counter, defaultdict
from itertools import product
from .. import modify_outcomes
from ..exceptions import ditException
def counts_from_data(data, hLength, fLength, marginals=True, alphabet=None, standardize=True):
"""
Returns conditional counts from `data`.
To obtain counts for joint distribution only, use fLength=0.
Parameters
----------
data : NumPy array
The data used to calculate morphs. Note: `data` cannot be a generator.
Also, if standardize is True, then data can be any indexable iterable,
such as a list or tuple.
hLength : int
The maximum history word length used to calculate morphs.
fLength : int
The length of future words that defines the morph.
marginals : bool
If True, then the morphs for all history words from L=0 to L=hLength
are calculated. If False, only histories of length L=hLength are
calculated.
alphabet : list
The alphabet to use when creating the morphs. If `None`, then one is
obtained from `data`. If not `None`, then the provided alphabet
supplements what appears in the data. So the data is always scanned
through in order to get the proper alphabet.
standardize : bool
The algorithm requires that the symbols in data be standardized to
a canonical alphabet consisting of integers from 0 to k-1, where k
is the alphabet size. If `data` is already standard, then an extra
pass through the data can be avoided by setting `standardize` to
`False`, but note: if `standardize` is False, then data MUST be a
NumPy array.
Returns
-------
histories : list
A list of observed histories, corresponding to the rows in `cCounts`.
cCounts : NumPy array
A NumPy array representing conditional counts. The rows correspond to
the observed histories, so this is sparse. The number of rows in this
array cannot be known in advance, but the number of columns will be
equal to the alphabet size raised to the `fLength` power.
hCounts : NumPy array
A 1D array representing the count of each history word.
alphabet : tuple
The ordered tuple representing the alphabet of the data. If `None`,
then one is created from the data.
Notes
-----
This requires three complete passes through the data. One to obtain
the full alphabet. Another to standardize the data. A final pass to
obtain the counts.
This is implemented densely. So during the course of the algorithm,
we work with a large array containing a row for each possible history.
Only the rows corresponding to observed histories are returned.
"""
try:
data = list(map(tuple, data))
except TypeError:
pass
counts = Counter(windowed_iter(data, hLength + fLength))
cond_counts = defaultdict(lambda: defaultdict(int))
for word, count in counts.items():
cond_counts[word[:hLength]][word[hLength:]] += count
histories = sorted(counts.keys())
alphabet = set(alphabet) if alphabet is not None else set()
alphabet = tuple(sorted(alphabet.union(*[set(hist) for hist in histories])))
cCounts = np.empty((len(histories), len(alphabet)**fLength))
for i, hist in enumerate(histories):
for j, future in enumerate(product(alphabet, repeat=fLength)):
cCounts[i, j] = cond_counts[hist][future]
hCounts = cCounts.sum(axis=1)
return histories, cCounts, hCounts, alphabet
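# Worked micro-example (illustrative, added for clarity): for
# data = [0, 1, 0, 1] with hLength=1 and fLength=1, the observed histories
# are (0,) and (1,); history (0,) is always followed by 1 and history (1,)
# by 0, so cCounts == [[0, 2], [1, 0]], hCounts == [2, 1] and
# alphabet == (0, 1).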
def distribution_from_data(d, L, trim=True, base=None):
"""
Returns a distribution over words of length `L` from `d`.
The returned distribution is the naive estimate of the distribution,
which assigns probabilities equal to the number of times a particular
word appeared in the data divided by the total number of times a word
could have appeared in the data.
Roughly, it corresponds to the stationary distribution of a maximum
likelihood estimate of the transition matrix of an (L-1)th order Markov
chain.
Parameters
----------
d : list
A list of symbols to be converted into a distribution.
L : integer
The length of the words for the distribution.
trim : bool
If true, then words with zero probability are trimmed from the
distribution.
base : int or string
The desired base of the returned distribution. If `None`, then the
value of `dit.ditParams['base']` is used.
"""
from dit import ditParams, Distribution
try:
d = list(map(tuple, d))
except TypeError:
pass
if base is None:
base = ditParams['base']
words, _, counts, _ = counts_from_data(d, L, 0)
# We turn the counts to probabilities
pmf = counts / counts.sum()
dist = Distribution(words, pmf, trim=trim)
dist.set_base(base)
if L == 1:
try:
dist = modify_outcomes(dist, lambda o: o[0])
except ditException:
pass
return dist
def get_counts(data, length):
"""
Count the occurrences of all words of `length` in `data`.
Parameters
----------
data : iterable
The sequence of samples
length : int
The length to group samples into.
Returns
-------
counts : np.array
Array with the count values.
"""
hists, _, counts, _ = counts_from_data(data, length, 0)
mask = np.array([len(h) == length for h in hists])
counts = counts[mask]
return counts
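# Illustrative, hedged usage sketch (hypothetical sample data):
#   get_counts([0, 1, 0, 1, 0], 1)            # -> array([3., 2.])
#   distribution_from_data([0, 1, 0, 1, 0], 1)
#   # -> naive-estimate Distribution with p(0) = 0.6, p(1) = 0.4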
| bsd-3-clause | -8,556,384,628,051,415,000 | 33.556757 | 98 | 0.613796 | false |
yelizariev/addons-yelizariev | web_debranding/__manifest__.py | 1 | 1144 | # Copyright 2015-2020 Ivan Yelizariev <https://it-projects.info/team/yelizariev>
# Copyright 2017 Ilmir Karamov <https://it-projects.info/team/ilmir-k>
# Copyright 2018-2019 Kolushov Alexandr <https://it-projects.info/team/KolushovAlexandr>
# Copyright 2018 Ildar Nasyrov <https://it-projects.info/team/iledarn>
# Copyright 2018 WohthaN <https://github.com/WohthaN>
# Copyright 2019 Eugene Molotov <https://github.com/em230418>
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
{
"name": "Backend debranding",
"version": "12.0.1.0.29",
"author": "IT-Projects LLC, Ivan Yelizariev",
"license": "LGPL-3",
"category": "Debranding",
"images": ["images/web_debranding.png"],
"website": "https://twitter.com/yelizariev",
"price": 250.00,
"currency": "EUR",
"depends": ["web", "mail", "access_settings_menu"],
"data": ["data.xml", "views.xml", "js.xml", "pre_install.xml"],
"qweb": ["static/src/xml/web.xml"],
"post_load": "post_load",
"auto_install": False,
"uninstall_hook": "uninstall_hook",
"installable": True,
"saas_demo_title": "Backend debranding demo",
}
| lgpl-3.0 | -7,172,393,133,491,129,000 | 43 | 88 | 0.664336 | false |
cprov/snapcraft | tests/unit/commands/__init__.py | 1 | 3601 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015-2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
import os
from textwrap import dedent
from click.testing import CliRunner
from snapcraft import storeapi
from snapcraft.cli._runner import run
from tests import fixture_setup, unit
_sample_keys = [
{
"name": "default",
"sha3-384": (
"vdEeQvRxmZ26npJCFaGnl-VfGz0lU2jZZkWp_s7E-RxVCNtH2_mtjcxq2NkDKkIp"
),
},
{
"name": "another",
"sha3-384": (
"JsfToV5hO2eN9l89pYYCKXUioTERrZIIHUgQQd47jW8YNNBskupiIjWYd3KXLY_D"
),
},
]
def get_sample_key(name):
for key in _sample_keys:
if key["name"] == name:
return key
raise KeyError(name)
def mock_snap_output(command, *args, **kwargs):
if command == ["snap", "keys", "--json"]:
return json.dumps(_sample_keys)
elif command[:2] == ["snap", "export-key"]:
if not command[2].startswith("--account="):
raise AssertionError("Unhandled command: {}".format(command))
account_id = command[2][len("--account=") :]
name = command[3]
# This isn't a full account-key-request assertion, but it's enough
# for testing.
return dedent(
"""\
type: account-key-request
account-id: {account_id}
name: {name}
public-key-sha3-384: {sha3_384}
"""
).format(
account_id=account_id, name=name, sha3_384=get_sample_key(name)["sha3-384"]
)
else:
raise AssertionError("Unhandled command: {}".format(command))
class CommandBaseTestCase(unit.TestCase):
def setUp(self):
super().setUp()
self.runner = CliRunner()
def run_command(self, args, **kwargs):
return self.runner.invoke(run, args, catch_exceptions=False, **kwargs)
class LifecycleCommandsBaseTestCase(CommandBaseTestCase):
yaml_template = """name: {step}-test
version: 1.0
summary: test {step}
description: if the {step} is successful the state file will be updated
confinement: strict
grade: stable
parts:
{parts}"""
yaml_part = """ {step}{iter:d}:
plugin: nil"""
def make_snapcraft_yaml(self, step, n=1, yaml_part=None, create=False):
if not yaml_part:
yaml_part = self.yaml_part
parts = "\n".join([yaml_part.format(step=step, iter=i) for i in range(n)])
super().make_snapcraft_yaml(self.yaml_template.format(step=step, parts=parts))
parts = []
for i in range(n):
part_dir = os.path.join(self.parts_dir, "{}{}".format(step, i))
state_dir = os.path.join(part_dir, "state")
parts.append({"part_dir": part_dir, "state_dir": state_dir})
return parts
class StoreCommandsBaseTestCase(CommandBaseTestCase):
def setUp(self):
super().setUp()
self.fake_store = fixture_setup.FakeStore()
self.useFixture(self.fake_store)
self.client = storeapi.StoreClient()
| gpl-3.0 | -9,074,133,153,076,883,000 | 29.260504 | 87 | 0.630103 | false |
Leopardob/Kistie | kcode/kcore/kmaya/kattrs/KstAttrs.py | 1 | 9982 | '''
K.I.S.T.I.E (Keep, It, Simple, Take, It, Easy)
Created on 1 Jan 2013
@author: Leonardo Bruni, leo.b2003@gmail.com
Kistie Attrs Class lib
This Kistie implementation is part of the project 'Kistie_Autorig' by Leonardo Bruni, leo.b2003@gmail.com
'''
import maya.cmds as cmds
# Import KstOut
import kcode.kcore.KstOut as _KstOut_
reload(_KstOut_)
KstOut = _KstOut_.KstOut()
class KstAttrs(object):
# Debug module name variable
_debug = 'KstAttrs'
def __init__(self):
KstOut.debug(KstAttrs._debug, 'Kistie Maya Attrs function module loaded...')
# Lock attr function
def lock_attr(self, obj_name, attr_name):
'''
Desc:
Lock maya attr
Parameter:
obj_name = object name that contains the attr
attr_name = attr name
Return value
'''
cmds.setAttr(obj_name+'.'+attr_name, l=True, k=False)
# Unlock attr function
def unlock_attr(self, obj_name, attr_name):
'''
Desc:
Unlock maya attr
Parameter:
obj_name = object name that contains the attr
attr_name = attr name
Return value
'''
cmds.setAttr(obj_name+'.'+attr_name, l=False, k=True)
# Set function for maya attributes
def set_attr(self, obj_name, attr_name, attr_value):
'''
Desc:
Set maya attribute
Parameter:
obj_name = object name that contains the attr
attr_name = attr name
attr_value = attr value to set
Return value
'''
cmds.setAttr(obj_name+'.'+attr_name, attr_value)
return attr_value
# Get function for maya attributes
def get_attr(self, obj_name, attr_name):
'''
Desc:
Get maya attribute
Parameter:
obj_name = object name that contains the attr
attr_name = attr name
Return value
'''
attr_value = cmds.getAttr(obj_name+'.'+attr_name)
return attr_value
@staticmethod
def create_float_attr(obj, attr_name, default_value=0, limit_min=False, limit_max=False, min=0, max=1):
'''
Desc:
Make float attr
Parameter:
obj = object to attach attr
attr_name = attr name
default_value = attr default value
limit_min = attr min value
limit_max = attr max value
min = min value
max = max value
Return string
obj.attrname
'''
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, dv=default_value, attributeType='float', min=min, max=max)
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True)
return obj+'.'+attr_name
@staticmethod
def create_double_attr(obj, attr_name, default_value=0, limit_min=False, limit_max=False, min=0, max=1):
'''
Desc:
Make double attr
Parameter:
obj = object to attach attr
attr_name = attr name
default_value = attr default value
limit_min = attr min value
limit_max = attr max value
min = min value
max = max value
Return string
obj.attrname
'''
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, dv=default_value, attributeType='double')
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True)
return obj+'.'+attr_name
@staticmethod
def create_vector_attr(obj, attr_name, default_value=[0,0,0]):
'''
Desc:
Make vector attr
Parameter:
obj = object to attach attr
attr_name = attr name
default_value = attr default vector
Return string
obj.attrname
'''
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, attributeType='double3')
cmds.addAttr(obj, shortName=attr_name+'X', longName=attr_name+'X', attributeType='double', p=attr_name)
cmds.addAttr(obj, shortName=attr_name+'Y', longName=attr_name+'Y', attributeType='double', p=attr_name)
cmds.addAttr(obj, shortName=attr_name+'Z', longName=attr_name+'Z', attributeType='double', p=attr_name)
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True)
cmds.setAttr(obj+'.'+attr_name+'X', e=True, keyable=True)
cmds.setAttr(obj+'.'+attr_name+'Y', e=True, keyable=True)
cmds.setAttr(obj+'.'+attr_name+'Z', e=True, keyable=True)
return obj+'.'+attr_name
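# Illustrative usage sketch (hypothetical node name, added for clarity):
#   KstAttrs.create_vector_attr('offset_null', 'restPos')
#   -> returns 'offset_null.restPos' with keyable restPosX/Y/Z children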
@staticmethod
def create_bool_attr(obj, attr_name, value=False):
'''
Desc:
Make bool attr
Parameter:
obj = object to attach attr
attr_name = attr name
default_value = attr default bool
Return string
obj.attrname
'''
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, attributeType='bool')
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True)
cmds.setAttr(obj+'.'+attr_name, value)
return obj+'.'+attr_name
@staticmethod
def create_string_attr(obj, attr_name, str):
'''
Desc:
Make string attr
Parameter:
obj = object to attach attr
attr_name = attr name
str = string value
Return string
obj.attrname
'''
#print('current_obj: ', obj)
#print('attr_name: ', attr_name)
#print('str', str)
# Check if current attribute exists, if not, will add
if not cmds.attributeQuery(attr_name, node=obj, exists = True):
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, dt='string')
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True)
cmds.setAttr(obj+'.'+attr_name, str, type='string')
else:
KstOut.debug(KstAttrs._debug, 'Attribute %s already exists on node %s, skipped' % (attr_name, obj))
return obj+'.'+attr_name
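# Illustrative usage sketch (hypothetical node and attribute names):
#   KstAttrs.create_string_attr('rig_info_node', 'kst_version', '1.0')
#   -> adds the attribute if missing and returns 'rig_info_node.kst_version'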
@staticmethod
def create_enum_attr(obj, attr_name, enum_list):
'''
Desc:
Make enum attr
Parameter:
obj = object to attach attr
attr_name = attr name
enum_list = enum value list
Return string
obj.attrname
'''
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, attributeType='enum', en=enum_list)
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True)
return obj+'.'+attr_name
@staticmethod
def create_matrix_attr(obj, attr_name, matrix):
'''
Desc:
Make matrix attr
Parameter:
obj = object to attach attr
attr_name = attr name
matrix = matrix
Return matrix
obj.attrname
'''
KstOut.debug(KstAttrs._debug, 'Matrix attr, not implemented yet!')
pass
@staticmethod
def create_separator_attr(obj, attr_name, enum_list='_'*16+':'):
'''
Desc:
Make separator attr
Parameter:
obj = object to attach attr
attr_name = attr name
enum_list = enum value list
Return string
obj.attrname
'''
cmds.addAttr(obj, shortName=attr_name, longName=attr_name, attributeType='enum', en=enum_list)
cmds.setAttr(obj+'.'+attr_name, e=True, keyable=True, lock=True)
return obj+'.'+attr_name
@staticmethod
def read_message_attr(obj_attr_name, *args):
'''
Desc:
Read a message attr
Parameter:
obj = object that contain message attr
attr_name = attr name
args = other inputs
Return string
obj.attrname
'''
# Object
obj = str(obj_attr_name).split('.')[0]
# Attr name
attr_name = str(obj_attr_name).split('.')[1]
# Connections
connections = cmds.listConnections(obj+'.'+attr_name, s=1)
return connections[0]
def create_tag_attr(self, obj, tag_name, tag_value):
'''
Desc:
Create a tag for selected object
Parameter:
obj = object that contain tag
tag = tag name
value = tag value
Return:
obj.tag_name
'''
# Check if obj is valid
if (obj):
if not cmds.attributeQuery(tag_name, node=obj, exists = True):
cmds.addAttr(obj, shortName=tag_name, longName=tag_name, dt='string')
cmds.setAttr(obj+'.'+tag_name, e=True, keyable=False)
cmds.setAttr(obj+'.'+tag_name, tag_value, type='string')
KstAttrs.lock_attr(self, obj, tag_name)
else:
pass
#print('Attribute %s already exists on node %s, skipped' % (tag_name, obj))
return obj+'.'+tag_name
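# Illustrative usage sketch (hypothetical node name; note this is an
# instance method, unlike the static creators above):
#   KstAttrs().create_tag_attr('spine_ctrl', 'kst_tag', 'rig_control')
#   -> adds, sets and locks 'spine_ctrl.kst_tag'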
def __get__(self, instance, owner):
'''
:param instance:
:param owner:
:return:
'''
return self.getValue(instance)
def __set__(self, instance, value):
'''
:param instance:
:param value:
:return:
'''
if not self.checkDataType(value):
return
self.setValue(instance, value)
def setValue(self, instance, value):
'''
:param instance:
:return:
'''
raise NotImplementedError()
def getValue(self, instance):
'''
:param value:
:return:
'''
raise NotImplementedError()
def checkDataType(self, value):
'''
:param value:
:return:
'''
if type(self.data_type).__name__ != 'list':
if type(value).__name__ != self.data_type:
raise ValueError("Attribute : expected {x} got {y})".format(x=self.data_type, y=type(value).__name__))
else:
return True
else:
if type(value).__name__ not in self.data_type:
raise ValueError("Attribute : expected {x} got {y}".format(
x=self.data_type, y=type(value).__name__))
else:
return 1
| bsd-3-clause | -6,074,279,414,227,202,000 | 27.121127 | 125 | 0.560108 | false |
evenmarbles/mlpy | mlpy/knowledgerep/cbr/similarity.py | 1 | 17713 | from __future__ import division, print_function, absolute_import
import math
import numpy as np
from abc import ABCMeta, abstractmethod
from sklearn.neighbors import NearestNeighbors
from sklearn.cluster import KMeans
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.neighbors.dist_metrics import METRIC_MAPPING
class Stat(object):
"""The similarity statistics container.
The similarity statistics is a container to pass the
calculated measure of similarity between the case
identified by the case id and the query case between
functions.
Parameters
----------
case_id : int
The case's id.
similarity : float
The similarity measure.
"""
__slots__ = ('_case_id', '_similarity')
@property
def case_id(self):
"""The case's id.
Returns
-------
int :
The case's id
"""
return self._case_id
@property
def similarity(self):
"""The similarity measure.
Returns
-------
float :
The similarity measure.
"""
return self._similarity
def __init__(self, case_id, similarity=None):
self._case_id = case_id
self._similarity = similarity
class SimilarityFactory(object):
"""The similarity factory.
An instance of a similarity model can be created by passing
the similarity model type.
Examples
--------
>>> from mlpy.knowledgerep.cbr.similarity import SimilarityFactory
>>> SimilarityFactory.create('cosine', **{'method_params': 0.8})
"""
@staticmethod
def create(_type, **kwargs):
"""
Create a similarity model of the given type.
Parameters
----------
_type : str
The similarity model type. Valid types are:
knn
A k-nearest-neighbor algorithm is used to determine similarity
between cases (:class:`NeighborSimilarity`). The value
``n_neighbors`` must be specified.
radius-n
Similarity between cases is determined by the nearest neighbors
within a radius (:class:`NeighborSimilarity`). The value ``radius``
must be specified.
kmeans
Similarity is determined by a KMeans clustering algorithm
(:class:`KMeansSimilarity`). The value ``n_cluster`` must be specified.
exact-match
Only exact matches are considered similar (:class:`ExactMatchSimilarity`).
cosine
A cosine similarity measure is used to determine similarity between
cases (:class:`CosineSimilarity`).
kwargs : dict, optional
Non-positional arguments to pass to the class of the given type
for initialization.
Returns
-------
ISimilarity :
A similarity instance of the given type.
"""
try:
if _type == "knn":
kwargs["n_neighbors"] = kwargs["method_params"]
elif _type == "radius-n":
kwargs["radius"] = kwargs["method_params"]
elif _type == "kmeans":
kwargs["n_cluster"] = kwargs["method_params"]
elif _type == "cosine":
kwargs["threshold"] = kwargs["method_params"]
del kwargs["method_params"]
return {
"knn": NeighborSimilarity,
"radius-n": NeighborSimilarity,
"kmeans": KMeansSimilarity,
"exact-match": ExactMatchSimilarity,
"cosine": CosineSimilarity,
}[_type](**kwargs)
except KeyError:
return None
class ISimilarity(object):
"""The similarity model interface.
The similarity model keeps an internal indexing structure of
the relevant case data to efficiently computing the similarity
measure between data points.
Notes
-----
All similarity models must inherit from this class.
"""
__metaclass__ = ABCMeta
def __init__(self):
#: The indexing structure
self._indexing_structure = None
#: The mapping of the data points to their case ids
self._id_map = None
""":ivar: dict"""
@abstractmethod
def build_indexing_structure(self, data, id_map):
"""Build the indexing structure.
Parameters
----------
data : ndarray[ndarray[float]]
The raw data points to be indexed.
id_map : dict[int, int]
The mapping from the data points to their case ids.
Raises
------
NotImplementedError
If the child class does not implement this function.
"""
raise NotImplementedError
@abstractmethod
def compute_similarity(self, data_point):
"""Computes the similarity.
Computes the similarity between the data point and the data in
the indexing structure returning the results in a collection of
similarity statistics (:class:`Stat`).
Parameters
----------
data_point : list[float]
The raw data point to compare against the data points stored in the
indexing structure.
Returns
-------
list[Stat] :
A collection of similarity statistics.
Raises
------
NotImplementedError
If the child class does not implement this function.
"""
raise NotImplementedError
class NeighborSimilarity(ISimilarity):
"""The neighborhood similarity model.
The neighbor similarity model determines similarity between the data
in the indexing structure and the query data by using the nearest
neighbor algorithm :class:`sklearn.neighbors.NearestNeighbors`.
Both a k-neighbors classifier and a radius-neighbor-classifier are implemented.
To choose between the classifiers either `n_neighbors` or `radius` must be
specified.
Parameters
----------
n_neighbors : int
The number of data points considered to be closest neighbors.
radius : int
The radius around the query data point, within which the data points
are considered closest neighbors.
algorithm : str
The internal indexing structure of the training data. Defaults to
`kd-tree`.
metric : str
The metric used to compute the distances between pairs of points.
Refer to :class:`sklearn.neighbors.DistanceMetric` for valid
identifiers. Default is `euclidean`.
metric_params : dict
Parameters relevant to the specified metric.
Raises
------
UserWarning :
If the either both or none of `n_neighbors` and `radius` are given.
See Also
--------
:class:`sklearn.neighbors.KNeighborsClassifier`, :class:`sklearn.neighbors.RadiusNeighborsClassifier`
"""
def __init__(self, n_neighbors=None, radius=None, algorithm=None, metric=None, metric_params=None):
super(NeighborSimilarity, self).__init__()
if (n_neighbors is not None and radius is not None) or not (n_neighbors is None or radius is None):
raise UserWarning("Exactly one of n_neighbors or radius must be initialized.")
self._n_neighbors = n_neighbors
self._radius = radius
if algorithm is not None:
if algorithm not in ["ball_tree", "kd_tree", "brute", "auto"]:
raise ValueError("%s is not a valid retrieval algorithm" % algorithm)
self._algorithm = algorithm
else:
self._algorithm = "kd_tree"
if metric is not None:
if metric not in METRIC_MAPPING:
raise ValueError("%s is not a valid retrieval metric" % metric)
self._metric = metric
else:
self._metric = "euclidean"
self._metric_params = metric_params if metric_params is not None else 2
def build_indexing_structure(self, data, id_map):
"""Build the indexing structure.
Build the indexing structure by fitting the data according to the
specified algorithm.
Parameters
----------
data : ndarray[ndarray[float]]
The raw data points to be indexed.
id_map : dict[int, int]
The mapping from the data points to their case ids.
"""
self._id_map = id_map
if self._n_neighbors is not None:
self._indexing_structure = NearestNeighbors(n_neighbors=self._n_neighbors, algorithm=self._algorithm,
metric=self._metric, p=self._metric_params).fit(data)
else:
self._indexing_structure = NearestNeighbors(radius=self._radius, algorithm=self._algorithm,
metric=self._metric, p=self._metric_params).fit(data)
def compute_similarity(self, data_point):
"""Computes the similarity.
Computes the similarity between the data point and the data in
the indexing structure using the :class:`sklearn.neighbors.NearestNeighbors`
algorithm. The results are returned in a collection of similarity statistics
(:class:`Stat`).
Parameters
----------
data_point : list[float]
The raw data point to compare against the data points stored in the
indexing structure.
Returns
-------
list[Stat] :
A collection of similarity statistics.
"""
if self._n_neighbors is not None:
# noinspection PyProtectedMember
raw_data = self._indexing_structure._fit_X
if len(raw_data) < self._n_neighbors:
result = []
for i, feat in enumerate(raw_data):
dist = np.linalg.norm(np.asarray(data_point) - np.asarray(feat))
result.append(Stat(self._id_map[i], dist))
# noinspection PyShadowingNames
result = sorted(result, key=lambda x: x.similarity)
else:
d, key_lists = self._indexing_structure.kneighbors(data_point)
result = [Stat(self._id_map[x], d[0][i]) for i, x in enumerate(key_lists[0])]
else:
d, key_lists = self._indexing_structure.radius_neighbors(data_point)
result = [Stat(self._id_map[x], d[0][i]) for i, x in enumerate(key_lists[0])]
return result
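# Illustrative, hedged usage sketch added for clarity; the feature vectors
# and case ids below are hypothetical. The query is passed as a raw data
# point, matching the module's documented (older scikit-learn era) interface.
def _demo_neighbor_similarity():
    model = NeighborSimilarity(n_neighbors=2)
    data = np.array([[0.0, 0.0], [1.0, 1.0], [5.0, 5.0]])
    model.build_indexing_structure(data, {0: 100, 1: 101, 2: 102})
    stats = model.compute_similarity([0.1, 0.1])
    return [(s.case_id, s.similarity) for s in stats]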
class KMeansSimilarity(ISimilarity):
"""The KMeans similarity model.
The KMeans similarity model determines similarity between the data in the
indexing structure and the query data by using the :class:`sklearn.cluster.KMeans`
algorithm.
Parameters
----------
n_cluster : int
The number of clusters to fit the raw data in.
"""
def __init__(self, n_cluster=None):
super(KMeansSimilarity, self).__init__()
self._n_cluster = n_cluster if n_cluster is not None else 8
def build_indexing_structure(self, data, id_map):
"""Build the indexing structure.
Build the indexing structure by fitting the data into `n_cluster`
clusters.
Parameters
----------
data : ndarray[ndarray[float]]
The raw data points to be indexed.
id_map : dict[int, int]
The mapping from the data points to their case ids.
"""
self._id_map = id_map
self._indexing_structure = KMeans(init='k-means++', n_clusters=self._n_cluster, n_init=10).fit(data)
def compute_similarity(self, data_point):
"""Computes the similarity.
Computes the similarity between the data point and the data in
the indexing structure using the :class:`sklearn.cluster.KMeans`
clustering algorithm. The results are returned in a collection
of similarity statistics (:class:`Stat`).
Parameters
----------
data_point : list[float]
The raw data point to compare against the data points stored in the
indexing structure.
Returns
-------
list[Stat] :
A collection of similarity statistics.
"""
label = self._indexing_structure.predict(data_point)
result = []
try:
# noinspection PyTypeChecker,PyUnresolvedReferences
key_lists = np.nonzero(self._indexing_structure.labels_ == label[0])[0]
result = [Stat(self._id_map[x]) for x in key_lists]
except IndexError:
pass
return result
class ExactMatchSimilarity(ISimilarity):
"""The exact match similarity model.
The exact match similarity model considered only exact matches between
the data in the indexing structure and the query data as similar.
"""
# noinspection PyUnusedLocal
def __init__(self, **kwargs):
super(ExactMatchSimilarity, self).__init__()
def build_indexing_structure(self, data, id_map):
"""Build the indexing structure.
To determine exact matches a brute-force algorithm is used thus
the data remains as is and no special indexing structure is
implemented.
Parameters
----------
data : ndarray[ndarray[float]]
The raw data points to be indexed.
id_map : dict[int, int]
The mapping from the data points to their case ids.
.. todo::
It might be worth looking into a more efficient way of determining
exact matches.
"""
self._id_map = id_map
self._indexing_structure = data
def compute_similarity(self, data_point):
"""Computes the similarity.
Computes the similarity between the data point and the data in
the indexing structure identifying exact matches. The results are
returned in a collection of similarity statistics (:class:`Stat`).
Parameters
----------
data_point : list[float]
The raw data point to compare against the data points stored in the
indexing structure.
Returns
-------
list[Stat] :
A collection of similarity statistics.
"""
result = []
for i, feat in enumerate(self._indexing_structure):
total = 0
for j, val in enumerate(data_point):
total += math.pow(val - feat[j], 2)
if total == 0.0:
result.append(Stat(self._id_map[i]))
return result
class CosineSimilarity(ISimilarity):
"""The cosine similarity model.
Cosine similarity is a measure of similarity between two vectors of an inner
product space that measures the cosine of the angle between them. The cosine
of 0 degrees is 1, and it is less than 1 for any other angle. It is thus a
judgment of orientation and not magnitude: two vectors with the same
orientation have a cosine similarity of 1, two vectors at 90 degrees have a
similarity of 0, and two vectors diametrically opposed have a similarity of -1,
independent of their magnitude [1]_.
The cosine model employs the
`cosine_similarity <http://scikit-learn.org/stable/modules/metrics.html#cosine-similarity>`_
function from the :mod:`sklearn.metrics.pairwise` module to determine similarity.
.. seealso::
`Machine Learning::Cosine Similarity for Vector Space Models (Part III)
<http://blog.christianperone.com/?p=2497>`_
References
----------
.. [1] `Wikipedia::cosine_similarity <https://en.wikipedia.org/wiki/Cosine_similarity>`_
"""
# noinspection PyUnusedLocal
def __init__(self, **kwargs):
super(CosineSimilarity, self).__init__()
def build_indexing_structure(self, data, id_map):
"""Build the indexing structure.
The cosine_similarity function from :mod:`sklearn.metrics.pairwise` takes
the raw data as input. Thus the data remains as is and no special indexing
structure is implemented.
Parameters
----------
data : ndarray[ndarray[float]]
The raw data points to be indexed.
id_map : dict[int, int]
The mapping from the data points to their case ids.
"""
self._id_map = id_map
self._indexing_structure = data
def compute_similarity(self, data_point):
"""Computes the similarity.
Computes the similarity between the data point and the data in
the indexing structure using the function :func:`cosine_similarity` from
:mod:`sklearn.metrics.pairwise`.
The resulting similarity ranges from -1 meaning exactly opposite, to 1
meaning exactly the same, with 0 indicating orthogonality (decorrelation),
and in-between values indicating intermediate similarity or dissimilarity.
The results are returned in a collection of similarity statistics (:class:`Stat`).
Parameters
----------
data_point : list[float]
The raw data point to compare against the data points stored in the
indexing structure.
Returns
-------
list[Stat] :
A collection of similarity statistics.
"""
similarity = cosine_similarity(data_point, self._indexing_structure)
if not np.any(data_point):
similarity = np.array([[float(np.array_equal(data_point, m)) for m in np.array(self._indexing_structure)]])
return [Stat(self._id_map[i], x) for i, x in enumerate(similarity[0])]
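# Illustrative, hedged usage sketch added for clarity; the vectors below are
# made up. Orthogonal cases score 0.0, an identical direction scores 1.0.
def _demo_cosine_similarity():
    model = CosineSimilarity()
    model.build_indexing_structure(np.array([[1.0, 0.0], [0.0, 1.0]]), {0: 1, 1: 2})
    return [(s.case_id, s.similarity)
            for s in model.compute_similarity([[1.0, 0.0]])]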
| mit | -1,097,991,111,061,715,100 | 31.985102 | 119 | 0.603342 | false |
keras-team/autokeras | autokeras/preprocessors/encoders.py | 1 | 3609 | # Copyright 2020 The AutoKeras Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import tensorflow as tf
from autokeras.engine import preprocessor
class Encoder(preprocessor.TargetPreprocessor):
"""Transform labels to encodings.
# Arguments
labels: A list of labels of any type. The labels to be encoded.
"""
def __init__(self, labels, **kwargs):
super().__init__(**kwargs)
self.labels = [
label.decode("utf-8") if isinstance(label, bytes) else str(label)
for label in labels
]
def get_config(self):
return {"labels": self.labels}
def fit(self, dataset):
return
def transform(self, dataset):
"""Transform labels to integer encodings.
# Arguments
dataset: tf.data.Dataset. The dataset to be transformed.
# Returns
tf.data.Dataset. The transformed dataset.
"""
keys_tensor = tf.constant(self.labels)
vals_tensor = tf.constant(list(range(len(self.labels))))
table = tf.lookup.StaticHashTable(
tf.lookup.KeyValueTensorInitializer(keys_tensor, vals_tensor), -1
)
return dataset.map(lambda x: table.lookup(tf.reshape(x, [-1])))
class OneHotEncoder(Encoder):
def transform(self, dataset):
"""Transform labels to one-hot encodings.
# Arguments
dataset: tf.data.Dataset. The dataset to be transformed.
# Returns
tf.data.Dataset. The transformed dataset.
"""
dataset = super().transform(dataset)
eye = tf.eye(len(self.labels))
dataset = dataset.map(lambda x: tf.nn.embedding_lookup(eye, x))
return dataset
def postprocess(self, data):
"""Transform probabilities back to labels.
# Arguments
data: numpy.ndarray. The output probabilities of the classification head.
# Returns
numpy.ndarray. The original labels.
"""
return np.array(
list(
map(
lambda x: self.labels[x],
np.argmax(np.array(data), axis=1),
)
)
).reshape(-1, 1)
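# Illustrative, hedged usage sketch added for clarity; the labels and
# probabilities below are hypothetical.
def _demo_one_hot_encoder():
    encoder = OneHotEncoder(labels=["cat", "dog"])
    dataset = tf.data.Dataset.from_tensor_slices([["dog"], ["cat"]])
    encoded = encoder.transform(dataset)  # elements become one-hot rows
    probabilities = np.array([[0.2, 0.8], [0.9, 0.1]])
    labels = encoder.postprocess(probabilities)  # -> [["dog"], ["cat"]]
    return encoded, labels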
class LabelEncoder(Encoder):
"""Transform the labels to integer encodings."""
def transform(self, dataset):
"""Transform labels to integer encodings.
# Arguments
dataset: tf.data.Dataset. The dataset to be transformed.
# Returns
tf.data.Dataset. The transformed dataset.
"""
dataset = super().transform(dataset)
dataset = dataset.map(lambda x: tf.expand_dims(x, axis=-1))
return dataset
def postprocess(self, data):
"""Transform probabilities back to labels.
# Arguments
data: numpy.ndarray. The output probabilities of the classification head.
# Returns
numpy.ndarray. The original labels.
"""
return np.array(
list(map(lambda x: self.labels[int(round(x[0]))], np.array(data)))
).reshape(-1, 1)
| apache-2.0 | -3,504,195,265,757,688,300 | 29.075 | 85 | 0.612081 | false |