function (stringlengths 11–56k) | repo_name (stringlengths 5–60) | features (sequence) |
---|---|---|
def template_flash(stitches, do_outies):
return "stitches: %s, outies: %s" % (stitches, do_outies), ["--flash", str(stitches), "--allow_outies", str(do_outies)] | knights-lab/shi7 | [
17,
7,
17,
9,
1471630281
] |
def __init__(self):
self.rebus_controller = None # type: Optional[RebusController] | openmotics/gateway | [
30,
12,
30,
27,
1481877206
] |
def send_config_change_event(msg, error=EventError.ErrorTypes.NO_ERROR, pubsub=INJECTED):
# type: (str, Dict[str, Any], PubSub) -> None
event = EsafeEvent(EsafeEvent.Types.CONFIG_CHANGE, {'type': 'apartment', 'msg': msg}, error=error)
pubsub.publish_esafe_event(PubSub.EsafeTopics.CONFIG, event) | openmotics/gateway | [
30,
12,
30,
27,
1481877206
] |
def load_apartment(apartment_id):
# type: (int) -> Optional[ApartmentDTO]
apartment_orm = Apartment.select().where(Apartment.id == apartment_id).first()
if apartment_orm is None:
return None
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
return apartment_dto | openmotics/gateway | [
30,
12,
30,
27,
1481877206
] |
def load_apartment_by_mailbox_id(mailbox_id):
# type: (int) -> Optional[ApartmentDTO]
apartment_orm = Apartment.select().where(Apartment.mailbox_rebus_id == mailbox_id).first()
if apartment_orm is None:
return None
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
return apartment_dto | openmotics/gateway | [
30,
12,
30,
27,
1481877206
] |
def load_apartment_by_doorbell_id(doorbell_id):
# type: (int) -> Optional[ApartmentDTO]
apartment_orm = Apartment.select().where(Apartment.doorbell_rebus_id == doorbell_id).first()
if apartment_orm is None:
return None
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
return apartment_dto | openmotics/gateway | [
30,
12,
30,
27,
1481877206
] |
def load_apartments():
# type: () -> List[ApartmentDTO]
apartments = []
for apartment_orm in Apartment.select():
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
apartments.append(apartment_dto)
return apartments | openmotics/gateway | [
30,
12,
30,
27,
1481877206
] |
def get_apartment_count():
# type: () -> int
return Apartment.select().count() | openmotics/gateway | [
30,
12,
30,
27,
1481877206
] |
def apartment_id_exists(apartment_id):
# type: (int) -> bool
apartments = ApartmentController.load_apartments()
ids = (x.id for x in apartments)
return apartment_id in ids | openmotics/gateway | [
30,
12,
30,
27,
1481877206
] |
def save_apartment(self, apartment_dto, send_event=True):
# type: (ApartmentDTO, bool) -> ApartmentDTO
self._check_rebus_ids(apartment_dto)
apartment_orm = ApartmentMapper.dto_to_orm(apartment_dto)
apartment_orm.save()
if send_event:
ApartmentController.send_config_change_event('save')
return ApartmentMapper.orm_to_dto(apartment_orm) | openmotics/gateway | [
30,
12,
30,
27,
1481877206
] |
def update_apartment(self, apartment_dto, send_event=True):
# type: (ApartmentDTO, bool) -> ApartmentDTO
self._check_rebus_ids(apartment_dto)
if 'id' not in apartment_dto.loaded_fields or apartment_dto.id is None:
raise RuntimeError('cannot update an apartment without the id being set')
try:
apartment_orm = Apartment.get_by_id(apartment_dto.id)
loaded_apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
for field in apartment_dto.loaded_fields:
if field == 'id':
continue
if hasattr(apartment_dto, field):
setattr(loaded_apartment_dto, field, getattr(apartment_dto, field))
apartment_orm = ApartmentMapper.dto_to_orm(loaded_apartment_dto)
apartment_orm.save()
if send_event:
ApartmentController.send_config_change_event('update')
return ApartmentMapper.orm_to_dto(apartment_orm)
except Exception as e:
        raise RuntimeError('Could not update the apartment: {}'.format(e)) | openmotics/gateway | [
30,
12,
30,
27,
1481877206
] |
def wrapper%(signature)s:
with ldap3mock:
return func%(funcargs)s | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _convert_objectGUID(item):
item = uuid.UUID("{{{0!s}}}".format(item)).bytes_le
item = escape_bytes(item)
return item | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def __init__(self):
self._calls = [] | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def __len__(self):
return len(self._calls) | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def setdata(self, request, response):
self._calls.append(Call(request, response)) | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def __init__(self, connection):
self.connection = connection | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def __init__(self, connection):
self.standard = self.Standard(connection) | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def set_directory(self, directory):
self.directory = directory | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def open(self, read_server_info=True):
return | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def start_tls(self, read_server_info=True):
self.start_tls_called = True | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def delete(self, dn, controls=None):
self.result = { 'dn' : '',
'referrals' : None,
'description' : 'success',
'result' : 0,
'message' : '',
'type' : 'addResponse'}
# Check to see if the user exists in the directory
try:
index = self._find_user(dn)
except StopIteration:
# If we get here the user doesn't exist so continue
self.result["description"] = "failure"
self.result["result"] = 32
self.result["message"] = "Error no such object: {0}".format(dn)
return False
# Delete the entry object for the user
self.directory.pop(index)
# Attempt to write changes to disk
with open(DIRECTORY, 'w+') as f:
f.write(str(self.directory))
return True | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _match_greater_than_or_equal(search_base, attribute, value, candidates):
matches = list()
for entry in candidates:
dn = entry.get("dn")
if not dn.endswith(search_base):
continue
value_from_directory = entry.get("attributes").get(attribute)
if str(value_from_directory) >= str(value):
entry["type"] = "searchResEntry"
matches.append(entry)
return matches | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _match_greater_than(search_base, attribute, value, candidates):
matches = list()
for entry in candidates:
dn = entry.get("dn")
if not dn.endswith(search_base):
continue
value_from_directory = entry.get("attributes").get(attribute)
if str(value_from_directory) > str(value):
entry["type"] = "searchResEntry"
matches.append(entry)
return matches | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _match_less_than_or_equal(search_base, attribute, value, candidates):
matches = list()
for entry in candidates:
dn = entry.get("dn")
if not dn.endswith(search_base):
continue
value_from_directory = entry.get("attributes").get(attribute)
if str(value_from_directory) <= str(value):
entry["type"] = "searchResEntry"
matches.append(entry)
return matches | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _match_less_than(search_base, attribute, value, candidates):
matches = list()
for entry in candidates:
dn = entry.get("dn")
if not dn.endswith(search_base):
continue
value_from_directory = entry.get("attributes").get(attribute)
if str(value_from_directory) < str(value):
entry["type"] = "searchResEntry"
matches.append(entry)
return matches | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _match_equal_to(search_base, attribute, value, candidates):
matches = list()
match_using_regex = False
if "*" in value:
match_using_regex = True
#regex = check_escape(value)
regex = value.replace('*', '.*')
regex = "^{0}$".format(regex)
for entry in candidates:
dn = to_unicode(entry.get("dn"))
if attribute not in entry.get("attributes") or not dn.endswith(search_base):
continue
values_from_directory = entry.get("attributes").get(attribute)
if isinstance(values_from_directory, list):
for item in values_from_directory:
if attribute == "objectGUID":
item = _convert_objectGUID(item)
if match_using_regex:
m = re.match(regex, str(item), re.I)
if m:
entry["type"] = "searchResEntry"
matches.append(entry)
else:
if item == value:
entry["type"] = "searchResEntry"
matches.append(entry)
else:
if attribute == "objectGUID":
values_from_directory = _convert_objectGUID(values_from_directory)
if match_using_regex:
m = re.match(regex, str(values_from_directory), re.I)
if m:
entry["type"] = "searchResEntry"
matches.append(entry)
else:
# The value, which we compare is unicode, so we convert
# the values_from_directory to unicode rather than str.
if isinstance(values_from_directory, bytes):
values_from_directory = values_from_directory.decode(
"utf-8")
elif type(values_from_directory) == int:
values_from_directory = u"{0!s}".format(values_from_directory)
if value == values_from_directory:
entry["type"] = "searchResEntry"
matches.append(entry)
return matches | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _match_notequal_to(search_base, attribute, value, candidates):
matches = list()
match_using_regex = False
if "*" in value:
match_using_regex = True
#regex = check_escape(value)
regex = value.replace('*', '.*')
regex = "^{0}$".format(regex)
for entry in candidates:
found = False
dn = entry.get("dn")
if not dn.endswith(search_base):
continue
values_from_directory = entry.get("attributes").get(attribute)
if isinstance(values_from_directory, list):
for item in values_from_directory:
if attribute == "objectGUID":
item = _convert_objectGUID(item)
if match_using_regex:
m = re.match(regex, str(item), re.I)
if m:
found = True
else:
if item == value:
found = True
if found is False:
entry["type"] = "searchResEntry"
matches.append(entry)
else:
if attribute == "objectGUID":
values_from_directory = _convert_objectGUID(values_from_directory)
if match_using_regex:
m = re.match(regex, str(values_from_directory), re.I)
if not m:
entry["type"] = "searchResEntry"
matches.append(entry)
else:
if str(value) != str(values_from_directory):
entry["type"] = "searchResEntry"
matches.append(entry)
return matches | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _parse_filter():
op = pyparsing.oneOf('! & |')
lpar = pyparsing.Literal('(').suppress()
rpar = pyparsing.Literal(')').suppress()
k = pyparsing.Word(pyparsing.alphanums)
# NOTE: We may need to expand on this list, but as this is not a real
# LDAP server we should be OK.
# Value to contain:
    # numbers, upper/lower case letters, asterisk, at symbol, minus, full
# stop, backslash or a space
v = pyparsing.Word(pyparsing.alphanums + "-*@.\\ äöü")
rel = pyparsing.oneOf("= ~= >= <=")
expr = pyparsing.Forward()
atom = pyparsing.Group(lpar + op + expr + rpar) \
| pyparsing.Combine(lpar + k + rel + v + rpar)
expr << atom + pyparsing.ZeroOrMore( expr )
return expr | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _deDuplicate(results):
found = dict()
deDuped = list()
for entry in results:
dn = entry.get("dn")
if not dn in found:
found[dn] = 1
deDuped.append(entry)
return deDuped | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _search_not(self, base, search_filter, candidates=None):
# Create empty candidates list as we need to use self.directory for
# each search
candidates = list()
this_filter = list()
index = 0
search_filter.remove("!")
for condition in search_filter:
if not isinstance(condition, list):
this_filter.append(condition)
index +=1
# Remove this_filter items from search_filter list
for condition in this_filter:
search_filter.remove(condition)
try:
search_filter = list(search_filter[0])
for sub_filter in search_filter:
if not isinstance(sub_filter, list):
candidates = self.operation.get(sub_filter)(base,
search_filter,
candidates)
else:
candidates = self.operation.get(sub_filter[0])(base,
sub_filter,
candidates)
except IndexError:
pass
candidates = self._invert_results(candidates)
for item in this_filter:
if ">=" in item:
k, v = item.split(">=")
candidates = Connection._match_less_than(base, k, v,
self.directory)
elif "<=" in item:
k, v = item.split("<=")
candidates = Connection._match_greater_than(base, k, v,
self.directory)
# Emulate AD functionality, same as "="
elif "~=" in item:
k, v = item.split("~=")
candidates = Connection._match_notequal_to(base, k, v,
self.directory)
elif "=" in item:
k, v = item.split("=")
candidates = Connection._match_notequal_to(base, k, v,
self.directory)
return candidates | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _search_or(self, base, search_filter, candidates=None):
# Create empty candidates list as we need to use self.directory for
# each search
candidates = list()
this_filter = list()
index = 0
search_filter.remove("|")
for condition in search_filter:
if not isinstance(condition, list):
this_filter.append(condition)
index +=1
# Remove this_filter items from search_filter list
for condition in this_filter:
search_filter.remove(condition)
try:
search_filter = list(search_filter[0])
for sub_filter in search_filter:
if not isinstance(sub_filter, list):
candidates += self.operation.get(sub_filter)(base,
search_filter,
candidates)
else:
candidates += self.operation.get(sub_filter[0])(base,
sub_filter,
candidates)
except IndexError:
pass
for item in this_filter:
if ">=" in item:
k, v = item.split(">=")
candidates += Connection._match_greater_than_or_equal(base, k, v,
self.directory)
elif "<=" in item:
k, v = item.split("<=")
candidates += Connection._match_less_than_or_equal(base, k, v,
self.directory)
# Emulate AD functionality, same as "="
elif "~=" in item:
k, v = item.split("~=")
candidates += Connection._match_equal_to(base, k, v,
self.directory)
elif "=" in item:
k, v = item.split("=")
candidates += Connection._match_equal_to(base, k, v,
self.directory)
return candidates | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def unbind(self):
return True | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def __init__(self):
self._calls = CallList()
self._server_mock = None
self.directory = []
self.exception = None
self.reset() | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def setLDAPDirectory(self, directory=None):
if directory is None:
self.directory = []
else:
try:
with open(DIRECTORY, 'w+') as f:
f.write(str(directory))
self.directory = directory
except OSError as e:
raise | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _load_data(self, directory):
try:
with open(directory, 'r') as f:
data = f.read()
return literal_eval(data)
except OSError as e:
raise | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def calls(self):
return self._calls | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def __exit__(self, *args):
self.stop()
self.reset() | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _on_Server(self, host, port, use_ssl, connect_timeout, get_info=None,
tls=None):
# mangle request packet
return "FakeServerObject" | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def start(self):
import mock
def unbound_on_Server(host, port,
use_ssl,
connect_timeout, *a, **kwargs):
return self._on_Server(host, port,
use_ssl,
connect_timeout, *a, **kwargs)
self._server_mock = mock.MagicMock()
self._server_mock.side_effect = unbound_on_Server
self._patcher = mock.patch('ldap3.Server',
self._server_mock)
self._patcher.start()
def unbound_on_Connection(server, user,
password,
auto_bind,
client_strategy,
authentication,
check_names,
auto_referrals, *a, **kwargs):
return self._on_Connection(server, user,
password,
auto_bind,
client_strategy,
authentication,
check_names,
auto_referrals, *a,
**kwargs)
self._patcher2 = mock.patch('ldap3.Connection',
unbound_on_Connection)
self._patcher2.start() | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def get_server_mock(self):
return self._server_mock | privacyidea/privacyidea | [
1321,
287,
1321,
217,
1401806822
] |
def _get_priorities(self):
"""
Load priorities from parameters.
:return: dict
"""
key = 'mail.sending.job.priorities'
try:
priorities = ast.literal_eval(
self.env['ir.config_parameter'].sudo().get_param(
key, default='{}'))
        # Catch exception to have an understandable error message
except (ValueError, SyntaxError):
raise exceptions.UserError(
_("Error to load the system parameter (%s) "
"of priorities") % key)
# As literal_eval can transform str into any format, check if we
# have a real dict
if not isinstance(priorities, dict):
raise exceptions.UserError(
_("Error to load the system parameter (%s) of priorities.\n"
"Invalid dictionary") % key)
return priorities | mozaik-association/mozaik | [
28,
20,
28,
4,
1421746811
] |
def __init__(self, program, timeOfDay):
super().__init__()
self._program = program
self._timeOfDay = timeOfDay | s0riak/pi-led-control | [
3,
1,
3,
2,
1468740230
] |
def setThreadStopEvent(self, threadStopEvent):
self.threadStopEvent = threadStopEvent
self._program.setThreadStopEvent(threadStopEvent) | s0riak/pi-led-control | [
3,
1,
3,
2,
1468740230
] |
def getCurrentColor(self):
return self._program.getCurrentColor() | s0riak/pi-led-control | [
3,
1,
3,
2,
1468740230
] |
def endorse_user(request, recipient_username):
recipient = get_object_or_404(Profile, user__username=recipient_username)
if recipient == request.profile:
raise Http404()
try:
endorsement = Endorsement.objects.get(
endorser=request.profile, recipient=recipient)
except Endorsement.DoesNotExist:
endorsement = None
if request.method == 'POST':
if 'delete' in request.POST and endorsement:
endorsement.delete()
messages.info(request, MESSAGES['endorsement_deleted'])
return HttpResponseRedirect(
endorsement.recipient.get_absolute_url())
form = EndorseForm(request.POST, instance=endorsement,
endorser=request.profile, recipient=recipient)
if form.is_valid():
is_new = endorsement is None
endorsement = form.save()
if is_new:
send_endorsement_notification(endorsement)
messages.info(request, MESSAGES['endorsement_saved'])
return HttpResponseRedirect(endorsement.get_absolute_url())
else:
form = EndorseForm(instance=endorsement, endorser=request.profile,
recipient=recipient)
profile = recipient # For profile_base.html.
return locals() | rfugger/villagescc | [
14,
11,
14,
12,
1337640393
] |
def endorsement(request, endorsement_id):
endorsement = get_object_or_404(Endorsement, pk=endorsement_id)
return locals() | rfugger/villagescc | [
14,
11,
14,
12,
1337640393
] |
def relationships(request):
accounts = ripple.get_user_accounts(request.profile)
return locals() | rfugger/villagescc | [
14,
11,
14,
12,
1337640393
] |
def relationship(request, partner_username):
partner = get_object_or_404(Profile, user__username=partner_username)
if partner == request.profile:
raise Http404 # Can't have relationship with yourself.
account = request.profile.account(partner)
if account:
entries = account.entries
balance = account.balance
else:
entries = []
balance = 0
profile = partner # For profile_base.html.
return locals() | rfugger/villagescc | [
14,
11,
14,
12,
1337640393
] |
def acknowledge_user(request, recipient_username):
recipient = get_object_or_404(Profile, user__username=recipient_username)
if recipient == request.profile:
raise Http404
# TODO: Don't recompute max_amount on form submit? Cache, or put in form
# as hidden field?
max_amount = ripple.max_payment(request.profile, recipient)
if request.method == 'POST':
form = AcknowledgementForm(request.POST, max_ripple=max_amount)
if form.is_valid():
acknowledgement = form.send_acknowledgement(
request.profile, recipient)
send_acknowledgement_notification(acknowledgement)
messages.info(request, MESSAGES['acknowledgement_sent'])
return HttpResponseRedirect(acknowledgement.get_absolute_url())
else:
form = AcknowledgementForm(max_ripple=max_amount, initial=request.GET)
can_ripple = max_amount > 0
profile = recipient # For profile_base.html.
return locals() | rfugger/villagescc | [
14,
11,
14,
12,
1337640393
] |
def __init__(self):
# Load available backends
for entry in pkg_resources.iter_entry_points("gosa.object.backend"):
clazz = entry.load()
ObjectBackendRegistry.backends[clazz.__name__] = clazz() | gonicus/gosa | [
13,
3,
13,
1,
1350561752
] |
def uuid2dn(self, backend, uuid, from_db_only=False):
dn = ObjectBackendRegistry.backends[backend].uuid2dn(uuid)
if dn is None and from_db_only is True:
# fallback to db
if self.__index is None:
self.__index = PluginRegistry.getInstance("ObjectIndex")
res = self.__index.search({'uuid': uuid}, {'dn': 1})
if len(res) == 1:
dn = res[0]['dn']
return dn | gonicus/gosa | [
13,
3,
13,
1,
1350561752
] |
def getInstance():
if not ObjectBackendRegistry.instance:
ObjectBackendRegistry.instance = ObjectBackendRegistry()
return ObjectBackendRegistry.instance | gonicus/gosa | [
13,
3,
13,
1,
1350561752
] |
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input', type=str, action='store', dest='input', default=None, help="Input file")
args = parser.parse_args()
stats = dict()
if args.input is None:
        print "Error: No input file"
        return
with open(args.input) as in_file:
for line in in_file.readlines():
time = int(line.split()[0])
tx_bytes = int(line.split()[1])
stats[time] = tx_bytes
stats = sorted(stats.items())
start_time = stats[0][0]
prev_tx = stats[0][1]
no_traffic_flag = True
for time, tx_bytes in stats:
if no_traffic_flag:
if tx_bytes > (prev_tx+100000):
no_traffic_flag = False
start_time, prev_tx = time, tx_bytes
else:
print (time-start_time), (tx_bytes-prev_tx)
prev_tx = tx_bytes | merlin-lang/kulfi | [
7,
6,
7,
4,
1437662351
] |
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code)) | Cangjians/pycangjie | [
5,
12,
5,
6,
1379847624
] |
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS) | Cangjians/pycangjie | [
5,
12,
5,
6,
1379847624
] |
def tearDown(self):
del self.cj | Cangjians/pycangjie | [
5,
12,
5,
6,
1379847624
] |
def __init__( self, parent, log ):
wx.Panel.__init__(self, parent, wx.ID_ANY)
self.log = log
self.callback = None
self.panel = panel = wx.Panel(self, wx.ID_ANY)
topsizer = wx.BoxSizer(wx.VERTICAL)
# Difference between using PropertyGridManager vs PropertyGrid is that
# the manager supports multiple pages and a description box.
self.pg = pg = wxpg.PropertyGrid(panel,
style=wxpg.PG_SPLITTER_AUTO_CENTER |
wxpg.PG_AUTO_SORT |
wxpg.PG_TOOLBAR)
# Show help as tooltips
pg.SetExtraStyle(wxpg.PG_EX_HELP_AS_TOOLTIPS)
pg.Bind( wxpg.EVT_PG_CHANGED, self.OnPropGridChange )
pg.Bind( wxpg.EVT_PG_PAGE_CHANGED, self.OnPropGridPageChange )
pg.Bind( wxpg.EVT_PG_SELECTED, self.OnPropGridSelect )
pg.Bind( wxpg.EVT_PG_RIGHT_CLICK, self.OnPropGridRightClick )
##pg.AddPage( "Page 1 - Testing All" )
# store the property grid for future reference
self.pg = pg | reingart/gui2py | [
69,
26,
69,
4,
1375235710
] |
def load_object(self, obj, callback=None):
pg = self.pg # get the property grid reference
self.callback = callback # store the update method | reingart/gui2py | [
69,
26,
69,
4,
1375235710
] |
def edit(self, name=""):
"Programatically select a (default) property to start editing it"
# for more info see DoSelectAndEdit in propgrid.cpp
for name in (name, "label", "value", "text", "title", "filename",
"name"):
prop = self.pg.GetPropertyByName(name)
if prop is not None:
break
self.Parent.SetFocus()
self.Parent.Raise()
self.pg.SetFocus()
# give time to the ui to show the prop grid and set focus:
wx.CallLater(250, self.select, prop.GetName()) | reingart/gui2py | [
69,
26,
69,
4,
1375235710
] |
def OnPropGridChange(self, event):
p = event.GetProperty()
if DEBUG: print "change!", p
if p:
name = p.GetName()
spec = p.GetPyClientData()
if spec and 'enum' in spec.type:
value = p.GetValueAsString()
else:
value = p.GetValue()
#self.log.write(u'%s changed to "%s"\n' % (p,p.GetValueAsString()))
# if it a property child (parent.child), extract its name
if "." in name:
name = name[name.rindex(".") + 1:]
if spec and not name in self.groups:
if name == 'font': # TODO: detect property type
# create a gui font from the wx.Font
font = Font()
font.set_wx_font(value)
value = font
# expressions must be evaluated to store the python object
if spec.type == "expr":
value = eval(value)
# re-create the wx_object with the new property value
# (this is required at least to apply new styles and init specs)
if DEBUG: print "changed", self.obj.name
kwargs = {str(name): value}
wx.CallAfter(self.obj.rebuild, **kwargs)
if name == 'name':
wx.CallAfter(self.callback, **dict(name=self.obj.name)) | reingart/gui2py | [
69,
26,
69,
4,
1375235710
] |
def OnDeleteProperty(self, event):
p = self.pg.GetSelectedProperty()
if p:
self.pg.DeleteProperty(p)
else:
wx.MessageBox("First select a property to delete") | reingart/gui2py | [
69,
26,
69,
4,
1375235710
] |
def OnPropGridRightClick(self, event):
p = event.GetProperty()
if p:
self.log.write(u'%s right clicked\n' % (event.GetProperty().GetName()))
else:
self.log.write(u'Nothing right clicked\n')
#self.obj.get_parent().Refresh() | reingart/gui2py | [
69,
26,
69,
4,
1375235710
] |
def copy_clfftdll_to_package():
import shutil
shutil.copy(
os.path.join(CLFFT_DIR, 'bin', 'clFFT.dll'),
'gpyfft')
shutil.copy(
os.path.join(CLFFT_DIR, 'bin', 'StatTimer.dll'),
'gpyfft')
print("copied clFFT.dll, StatTimer.dll") | geggo/gpyfft | [
53,
19,
53,
12,
1337859567
] |
def get_version():
main_ns = {}
version_path = convert_path('gpyfft/version.py')
with open(version_path) as version_file:
exec(version_file.read(), main_ns)
version = main_ns['__version__']
return version | geggo/gpyfft | [
53,
19,
53,
12,
1337859567
] |
def setup(self):
IfcStore.purge()
bpy.ops.wm.read_homefile(app_template="")
if bpy.data.objects:
bpy.data.batch_remove(bpy.data.objects)
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
blenderbim.bim.handler.setDefaultProperties(None) | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def setup(self):
IfcStore.purge()
bpy.ops.wm.read_homefile(app_template="")
bpy.data.batch_remove(bpy.data.objects)
bpy.ops.outliner.orphans_purge(do_local_ids=True, do_linked_ids=True, do_recursive=True)
blenderbim.bim.handler.setDefaultProperties(None)
bpy.ops.bim.create_project() | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def subfunction(self):
run(function(self)) | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def scenario_debug(function):
def subfunction(self):
run_debug(function(self))
return subfunction | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def i_add_a_cube():
bpy.ops.mesh.primitive_cube_add() | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_object_name_is_selected(name):
i_deselect_all_objects()
additionally_the_object_name_is_selected(name) | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def i_deselect_all_objects():
bpy.context.view_layer.objects.active = None
bpy.ops.object.select_all(action="DESELECT") | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def i_set_prop_to_value(prop, value):
try:
eval(f"bpy.context.{prop}")
except:
assert False, "Property does not exist"
try:
exec(f'bpy.context.{prop} = "{value}"')
except:
exec(f"bpy.context.{prop} = {value}") | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def i_enable_prop(prop):
exec(f"bpy.context.{prop} = True") | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def i_rename_the_object_name1_to_name2(name1, name2):
the_object_name_exists(name1).name = name2 | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def an_ifc_file_exists():
ifc = IfcStore.get_file()
if not ifc:
assert False, "No IFC file is available"
return ifc | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_object_name_does_not_exist(name):
assert bpy.data.objects.get(name) is None, "Object exists" | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_object_name_is_not_an_ifc_element(name):
id = the_object_name_exists(name).BIMObjectProperties.ifc_definition_id
assert id == 0, f"The ID is {id}" | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_object_name_is_not_in_the_collection_collection(name, collection):
assert collection not in [c.name for c in the_object_name_exists(name).users_collection] | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_collection_name1_is_in_the_collection_name2(name1, name2):
assert bpy.data.collections.get(name2).children.get(name1) | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_object_name_is_placed_in_the_collection_collection(name, collection):
obj = the_object_name_exists(name)
[c.objects.unlink(obj) for c in obj.users_collection]
bpy.data.collections.get(collection).objects.link(obj) | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_object_name_is_contained_in_container_name(name, container_name):
ifc = an_ifc_file_exists()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
container = ifcopenshell.util.element.get_container(element)
if not container:
assert False, f'Object "{name}" is not in any container'
assert container.Name == container_name, f'Object "{name}" is in {container}' | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def i_delete_the_selected_objects():
bpy.ops.object.delete()
blenderbim.bim.handler.active_object_callback() | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_file_name_should_contain_value(name, value):
with open(name, "r") as f:
assert value in f.read() | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_object_name1_has_no_boolean_difference_by_name2(name1, name2):
obj = the_object_name_exists(name1)
for modifier in obj.modifiers:
if modifier.type == "BOOLEAN" and modifier.object and modifier.object.name == name2:
assert False, "A boolean was found" | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_object_name_is_not_voided_by_void(name, void):
ifc = IfcStore.get_file()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
for rel in element.HasOpenings:
if rel.RelatedOpeningElement.Name == void:
assert False, "A void was found" | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_object_name_is_not_a_void(name):
ifc = IfcStore.get_file()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
if any(element.VoidsElements):
assert False, "A void was found" | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_void_name_is_not_filled_by_filling(name, filling):
ifc = IfcStore.get_file()
element = ifc.by_id(the_object_name_exists(name).BIMObjectProperties.ifc_definition_id)
if any(rel.RelatedBuildingElement.Name == filling for rel in element.HasFillings):
assert False, "A filling was found" | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_object_name_should_display_as_mode(name, mode):
assert the_object_name_exists(name).display_type == mode | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def the_object_name_is_at_location(name, location):
obj_location = the_object_name_exists(name).location
assert (
obj_location - Vector([float(co) for co in location.split(",")])
).length < 0.1, f"Object is at {obj_location}" | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def run(scenario):
keywords = ["Given", "When", "Then", "And", "But"]
for line in scenario.split("\n"):
for key, value in variables.items():
line = line.replace("{" + key + "}", str(value))
for keyword in keywords:
line = line.replace(keyword, "")
line = line.strip()
if not line:
continue
match = None
for definition, callback in definitions.items():
match = re.search("^" + definition + "$", line)
if match:
try:
callback(*match.groups())
except AssertionError as e:
assert False, f"Failed: {line}, with error: {e}"
break
if not match:
assert False, f"Definition not implemented: {line}"
return True | IfcOpenShell/IfcOpenShell | [
1191,
546,
1191,
377,
1439197394
] |
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name | amitsaha/learning | [
4,
4,
4,
20,
1413605035
] |
def __init__(self, name):
self.name = name | amitsaha/learning | [
4,
4,
4,
20,
1413605035
] |
def prefetch_rule(context: models.Context):
# Make sure that we have the IAM policy in cache.
project_ids = {c.project_id for c in gke.get_clusters(context).values()}
for pid in project_ids:
iam.get_project_policy(pid) | GoogleCloudPlatform/gcpdiag | [
195,
48,
195,
16,
1626709107
] |
def get_credentials(env=None) -> tuple:
path = None
if env is None:
env = os.environ.copy()
if 'GCE_CREDENTIALS' in env:
json_credentials = env['GCE_CREDENTIALS']
elif 'GOOGLE_APPLICATION_CREDENTIALS' in env:
path = env['GOOGLE_APPLICATION_CREDENTIALS']
json_credentials = util.read_file(path)
else:
raise util.LauncherError(
'MissingParameter', 'Either GCE_CREDENTIALS or GOOGLE_APPLICATION_CREDENTIALS must be set in env')
return json_credentials, path | dcos/dcos-launch | [
16,
25,
16,
17,
1499696883
] |
def __init__(self, config: dict, env=None):
creds_string, _ = get_credentials(env)
self.gcp_wrapper = gcp.GcpWrapper(json.loads(creds_string))
self.config = config | dcos/dcos-launch | [
16,
25,
16,
17,
1499696883
] |
def deployment(self):
""" Builds a BareClusterDeployment instance with self.config, but only returns it successfully if the
corresponding real deployment (active machines) exists and doesn't contain any errors.
"""
try:
deployment = gcp.BareClusterDeployment(self.gcp_wrapper, self.config['deployment_name'],
self.config['gce_zone'])
info = deployment.get_info()
errors = info['operation'].get('error')
if errors:
raise util.LauncherError('DeploymentContainsErrors', str(errors))
return deployment
except HttpError as e:
if e.resp.status == 404:
raise util.LauncherError('DeploymentNotFound',
"The deployment you are trying to access doesn't exist") from e
raise e | dcos/dcos-launch | [
16,
25,
16,
17,
1499696883
] |
def key_helper(self):
""" Generates a public key and a private key and stores them in the config. The public key will be applied to
all the instances in the deployment later on when wait() is called.
"""
if self.config['key_helper']:
private_key, public_key = util.generate_rsa_keypair()
self.config['ssh_private_key'] = private_key.decode()
self.config['ssh_public_key'] = public_key.decode() | dcos/dcos-launch | [
16,
25,
16,
17,
1499696883
] |
def get_bootstrap_host(self) -> Host:
return list(self.deployment.hosts)[0] | dcos/dcos-launch | [
16,
25,
16,
17,
1499696883
] |
def get_describe_filters(self):
return {
"Filters": [
{"Name": "tag:Name", "Values": [self.resource.name]},
            {
                "Name": "instance-state-name",
                "Values": [
                    "pending",
                    "running",
                    "shutting-down",
                    "stopping",
                    "stopped",
                ],
            },
]
} | yaybu/touchdown | [
11,
4,
11,
17,
1410353271
] |