function
stringlengths 11
56k
| repo_name
stringlengths 5
60
| features
sequence |
---|---|---|
def build_schema(self):
"""
Returns a dictionary of all the fields on the resource and some
properties about those fields. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def dehydrate_resource_uri(self, bundle):
"""
For the automatically included ``resource_uri`` field, dehydrate
the URI for the given bundle. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def generate_cache_key(self, *args, **kwargs):
"""
Creates a unique-enough cache key. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def get_object_list(self, request):
"""
A hook to allow making returning the list of available objects. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def apply_authorization_limits(self, request, object_list):
"""
Allows the ``Authorization`` class to further limit the object list.
Also a hook to customize per ``Resource``.
"""
if hasattr(self._meta.authorization, 'apply_limits'):
object_list = self._meta.authorization.apply_limits(request, object_list) | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def obj_get_list(self, request=None, **kwargs):
"""
Fetches the list of objects available on the resource. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def cached_obj_get_list(self, request=None, **kwargs):
"""
A version of ``obj_get_list`` that uses the cache as a means to get
commonly-accessed data faster.
"""
cache_key = self.generate_cache_key('list', **kwargs)
obj_list = self._meta.cache.get(cache_key) | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def obj_get(self, request=None, **kwargs):
"""
Fetches an individual object on the resource. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def cached_obj_get(self, request=None, **kwargs):
"""
A version of ``obj_get`` that uses the cache as a means to get
commonly-accessed data faster.
"""
cache_key = self.generate_cache_key('detail', **kwargs)
bundle = self._meta.cache.get(cache_key) | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def obj_create(self, bundle, request=None, **kwargs):
"""
Creates a new object based on the provided data. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def obj_update(self, bundle, request=None, **kwargs):
"""
Updates an existing object (or creates a new object) based on the
provided data. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def obj_delete_list(self, request=None, **kwargs):
"""
Deletes an entire list of objects. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def obj_delete(self, request=None, **kwargs):
"""
Deletes a single object. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def create_response(self, request, data):
"""
Extracts the common "which-format/serialize/return-response" cycle. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def is_valid(self, bundle, request=None):
"""
Handles checking if the data provided by the user is valid. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def rollback(self, bundles):
"""
Given the list of bundles, delete all objects pertaining to those
bundles. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def get_list(self, request, **kwargs):
"""
Returns a serialized list of resources. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def get_detail(self, request, **kwargs):
"""
Returns a single serialized resource. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def put_list(self, request, **kwargs):
"""
Replaces a collection of resources with another collection. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def put_detail(self, request, **kwargs):
"""
Either updates an existing resource or creates a new one with the
provided data. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def post_list(self, request, **kwargs):
"""
Creates a new resource/object with the provided data. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def post_detail(self, request, **kwargs):
"""
Creates a new subcollection of the resource under a resource. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def delete_list(self, request, **kwargs):
"""
Destroys a collection of resources/objects. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def delete_detail(self, request, **kwargs):
"""
Destroys a single resource/object. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def get_schema(self, request, **kwargs):
"""
Returns a serialized form of the schema of the resource. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def get_multiple(self, request, **kwargs):
"""
Returns a serialized list of resources based on the identifiers
from the URL. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def __new__(cls, name, bases, attrs):
new_class = super(ModelDeclarativeMetaclass, cls).__new__(cls, name, bases, attrs)
fields = getattr(new_class._meta, 'fields', [])
excludes = getattr(new_class._meta, 'excludes', [])
field_names = new_class.base_fields.keys() | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def should_skip_field(cls, field):
"""
Given a Django model field, return if it should be included in the
contributed ApiFields.
"""
# Ignore certain fields (related fields).
if getattr(field, 'rel'):
return True | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def api_field_from_django_field(cls, f, default=CharField):
"""
Returns the field type that would likely be associated with each
Django type.
"""
result = default | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def get_fields(cls, fields=None, excludes=None):
"""
Given any explicit fields to include and fields to exclude, add
additional fields based on the associated model.
"""
final_fields = {}
fields = fields or []
excludes = excludes or [] | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def build_filters(self, filters=None):
"""
Given a dictionary of filters, create the necessary ORM-level filters. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def apply_sorting(self, obj_list, options=None):
"""
Given a dictionary of options, apply some ORM-level sorting to the
provided ``QuerySet``. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def get_object_list(self, request):
"""
An ORM-specific implementation of ``get_object_list``. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def obj_get_list(self, request=None, **kwargs):
"""
A ORM-specific implementation of ``obj_get_list``. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def obj_get(self, request=None, **kwargs):
"""
A ORM-specific implementation of ``obj_get``. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def obj_create(self, bundle, request=None, **kwargs):
"""
A ORM-specific implementation of ``obj_create``.
"""
bundle.obj = self._meta.object_class() | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def obj_update(self, bundle, request=None, **kwargs):
"""
A ORM-specific implementation of ``obj_update``.
"""
if not bundle.obj or not bundle.obj.pk:
# Attempt to hydrate data from kwargs before doing a lookup for the object.
# This step is needed so certain values (like datetime) will pass model validation.
try:
bundle.obj = self.get_object_list(request).model()
bundle.data.update(kwargs)
bundle = self.full_hydrate(bundle)
lookup_kwargs = kwargs.copy()
lookup_kwargs.update(dict(
(k, getattr(bundle.obj, k))
for k in kwargs.keys()
if getattr(bundle.obj, k) is not None))
except:
# if there is trouble hydrating the data, fall back to just
# using kwargs by itself (usually it only contains a "pk" key
# and this will work fine.
lookup_kwargs = kwargs
try:
bundle.obj = self.get_object_list(request).get(**lookup_kwargs)
except ObjectDoesNotExist:
raise NotFound("A model instance matching the provided arguments could not be found.") | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def obj_delete_list(self, request=None, **kwargs):
"""
A ORM-specific implementation of ``obj_delete_list``. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def obj_delete(self, request=None, **kwargs):
"""
A ORM-specific implementation of ``obj_delete``. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def rollback(self, bundles):
"""
A ORM-specific implementation of ``rollback``. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def save_m2m(self, bundle):
"""
Handles the saving of related M2M data. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def get_resource_uri(self, bundle_or_obj):
"""
Handles generating a resource URI for a single resource. | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def _build_reverse_url(self, name, args=None, kwargs=None):
    """Reverse a URL name, qualified with this resource's namespace."""
    qualified = "%s:%s" % (self._meta.urlconf_namespace, name)
    return reverse(qualified, args=args, kwargs=kwargs)
5,
1,
5,
6,
1293081853
] |
def convert_post_to_put(request):
"""
Force Django to process the PUT.
"""
if request.method == "PUT":
if hasattr(request, '_post'):
del request._post
del request._files | colinsullivan/bingo-board | [
5,
1,
5,
6,
1293081853
] |
def launch(self):
logger = self.logger
logger.debug('getDmidecode: start')
res = {} | naparuba/kunai | [
37,
6,
37,
56,
1424895275
] |
def replace_getOp(line):
    """Rewrite C++-style `MI->getOperand(n)` calls to the C API form.

    Mirrors the original elif cascade exactly: only the FIRST operand
    index (0..8) found in the line is rewritten, but all occurrences of
    that one pattern are replaced.  Lines without a match pass through
    unchanged.
    """
    for idx in range(9):
        cxx_call = 'MI->getOperand(%u)' % idx
        if cxx_call in line:
            return line.replace(cxx_call, 'MCInst_getOperand(MI, %u)' % idx)
    return line
169,
64,
169,
16,
1428200612
] |
def extract_paren(line, text):
    """Return the text inside the first "(...)" at or after the first
    occurrence of `text` in `line`.  Raises ValueError when `text` or
    the parentheses are absent (same as the original .index calls)."""
    anchor = line.index(text)
    start = line.index('(', anchor) + 1
    end = line.index(')', anchor)
    return line[start:end]
169,
64,
169,
16,
1428200612
] |
def extract_brackets(line):
    """Return the text between the first '<' and the first '>' in
    `line`, or '' when the line contains no '<' at all."""
    if '<' not in line:
        return ''
    open_pos = line.index('<')
    return line[open_pos + 1:line.index('>')]
169,
64,
169,
16,
1428200612
] |
def del_brackets(line):
    """Drop the first '<...>' span (brackets included) from `line`;
    lines without '<' pass through unchanged."""
    if '<' not in line:
        return line
    head = line[:line.index('<')]
    tail = line[line.index('>') + 1:]
    return head + tail
169,
64,
169,
16,
1428200612
] |
def connection_made(self, transport):
    # asyncio protocol callback: keep the transport so data_received
    # can write back to the peer.
    self.transport = transport
9209,
537,
9209,
69,
1446956094
] |
def data_received(self, data):
    # Echo protocol: write each received chunk straight back.
    self.transport.write(data)
9209,
537,
9209,
69,
1446956094
] |
def connection_made(self, transport):
    # BufferedProtocol setup: store the transport and allocate the
    # receive buffer later handed out by get_buffer().
    self.transport = transport
    # Here the buffer is intended to be copied, so that the outgoing buffer
    # won't be wrongly updated by the next read.
    self.buffer = bytearray(256 * 1024)
9209,
537,
9209,
69,
1446956094
] |
def get_buffer(self, sizehint):
    # BufferedProtocol hook: always hand back the same preallocated
    # buffer; `sizehint` is deliberately ignored.
    return self.buffer
9209,
537,
9209,
69,
1446956094
] |
def request_account():
    """Provides a form to request an account."""
    # GET-only view: simply render the request form template.
    return flask.render_template("request_account.html")
5,
3,
5,
14,
1422746684
] |
def request_account_submit():
    """Handles an account creation request."""
    form = flask.request.form
    uid = form.get("uid", None)
    last_name = form.get("last_name", None)
    # Both fields are required; bounce back to the request form otherwise.
    if uid is None or last_name is None:
        flask.flash("Invalid request.")
        return flask.redirect(flask.url_for("account.request_account"))
    success, error_msg = helpers.handle_request_account(uid, last_name)
    if not success:
        flask.flash(error_msg)
        return flask.redirect(flask.url_for("account.request_account"))
    flask.flash(
        "An email has been sent with a link to create your account.")
    return flask.redirect(flask.url_for("home"))
5,
3,
5,
14,
1422746684
] |
def create_account(create_account_key):
    """Checks the key. If valid, displays the create account page."""
    user_id = auth_utils.check_create_account_key(create_account_key)
    if user_id is not None:
        user_data = helpers.get_user_data(user_id)
        if user_data is not None:
            return flask.render_template(
                "create_account.html", user_data=user_data, key=create_account_key)
        # Key was fine but the user record could not be loaded.
        flask.flash("An unexpected error occurred. Please contact DevTeam.")
        return flask.redirect(flask.url_for("home"))
    # Bad or expired key: log it and send the visitor home.
    flask.current_app.logger.warn(
        f'Invalid create_account_key: {create_account_key}')
    flask.flash("Invalid request. Please check your link and try again.")
    return flask.redirect(flask.url_for("home"))
5,
3,
5,
14,
1422746684
] |
def reconnect(sheet, manager, **kwargs):
    """Performs reconnections (vertex merging / splitting) following Finegan et al. 2019
    kwargs overwrite their corresponding `sheet.settings` entries
    Keyword Arguments
    -----------------
    threshold_length : the threshold length at which vertex merging is performed
    p_4 : the probability per unit time to perform a detachement from a rank 4 vertex
    p_5p : the probability per unit time to perform a detachement from a rank 5 or more vertex
    See Also
    --------
    **The tricellular vertex-specific adhesion molecule Sidekick
    facilitates polarised cell intercalation during Drosophila axis
    extension** _Tara M Finegan, Nathan Hervieux, Alexander
    Nestor-Bergmann, Alexander G. Fletcher, Guy B Blanchard, Benedicte
    Sanson_ bioRxiv 704932; doi: https://doi.org/10.1101/704932
    """
    # NOTE(review): this updates sheet.settings in place with the event
    # kwargs — unlike the other events there is no per-call spec dict here.
    sheet.settings.update(kwargs)
    nv = sheet.Nv
    merge_vertices(sheet)
    if nv != sheet.Nv:
        # Vertex count dropped, so at least one merge happened.
        logger.info(f"Merged {nv - sheet.Nv+1} vertices")
    nv = sheet.Nv
    retval = detach_vertices(sheet)
    if retval:
        # Non-zero return signals the detachment step failed — skip it.
        logger.info("Failed to detach, skipping")
    if nv != sheet.Nv:
        logger.info(f"Detached {sheet.Nv - nv} vertices")
    # Re-schedule this event for the next time step.
    manager.append(reconnect, **kwargs)
50,
17,
50,
22,
1426782444
] |
def division(sheet, manager, **kwargs):
    """Cell division happens through cell growth up to a critical volume,
    followed by actual division of the face.
    Parameters
    ----------
    sheet : a `Sheet` object
    manager : an `EventManager` instance
    face_id : int,
        index of the mother face
    growth_rate : float, default 0.1
        rate of increase of the prefered volume
    critical_vol : float, default 2.
        volume at which the cells stops to grow and devides
    """
    # Copy the module-level default spec: updating it in place would leak
    # this call's kwargs (and the critical_vol scaling below) into every
    # subsequent division event.
    division_spec = dict(default_division_spec)
    division_spec.update(**kwargs)
    face = division_spec["face"]
    # Scale the critical volume by the tissue's preferred face volume.
    division_spec["critical_vol"] *= sheet.specs["face"]["prefered_vol"]
    # Replaced a leftover debug print of (vol, critical_vol) with a debug log.
    logger.debug(
        "division: face %s vol=%s critical_vol=%s",
        face, sheet.face_df.loc[face, "vol"], division_spec["critical_vol"],
    )
    if sheet.face_df.loc[face, "vol"] < division_spec["critical_vol"]:
        # Still below the critical volume: keep growing and re-schedule.
        increase(
            sheet, "face", face, division_spec["growth_rate"], "prefered_vol", True
        )
        manager.append(division, **division_spec)
    else:
        # Critical volume reached: divide and give the daughter a fresh id.
        daughter = cell_division(sheet, face, division_spec["geom"])
        sheet.face_df.loc[daughter, "id"] = sheet.face_df.id.max() + 1
50,
17,
50,
22,
1426782444
] |
def contraction(sheet, manager, **kwargs):
    """Single step contraction event.

    Increases the face's contractility column unless the face is already
    too small or maximally contractile.
    """
    # Copy the default spec instead of mutating the shared module-level
    # dict: in-place update leaked kwargs into later contraction events.
    contraction_spec = dict(default_contraction_spec)
    contraction_spec.update(**kwargs)
    face = contraction_spec["face"]
    # Skip faces below the critical area or above the contractility cap.
    if (sheet.face_df.loc[face, "area"] < contraction_spec["critical_area"]) or (
        sheet.face_df.loc[face, contraction_spec["contraction_column"]]
        > contraction_spec["max_contractility"]
    ):
        return
    increase(
        sheet,
        "face",
        face,
        contraction_spec["contractile_increase"],
        contraction_spec["contraction_column"],
        contraction_spec["multiply"],
    )
50,
17,
50,
22,
1426782444
] |
def type1_transition(sheet, manager, **kwargs):
    """Custom type 1 transition event that tests if
    the shorter edge of the face is smaller than
    the critical length.
    """
    # Copy the default spec instead of mutating the shared module-level
    # dict: in-place update leaked kwargs into later events.
    spec = dict(default_type1_transition_spec)
    spec.update(**kwargs)
    face = spec["face"]
    edges = sheet.edge_df[sheet.edge_df["face"] == face]
    # Perform the edge exchange only when the face's shortest edge has
    # shrunk below the critical length.
    if min(edges["length"]) < spec["critical_length"]:
        exchange(sheet, face, spec["geom"])
50,
17,
50,
22,
1426782444
] |
def face_elimination(sheet, manager, **kwargs):
    """Removes the face with the given face id from the sheet."""
    # Copy the default spec instead of mutating the shared module-level
    # dict: in-place update leaked kwargs into later events.
    spec = dict(default_face_elimination_spec)
    spec.update(**kwargs)
    remove(sheet, spec["face"], spec["geom"])
50,
17,
50,
22,
1426782444
] |
def check_tri_faces(sheet, manager, **kwargs):
    """Three-neighbour cell elimination.

    Add all cells with three neighbours in the manager
    to be eliminated at the next time step.
    Parameters
    ----------
    sheet : a :class:`tyssue.sheet` object
    manager : a :class:`tyssue.events.EventManager` object
    """
    # Copy the default spec instead of mutating the shared module-level
    # dict: in-place update leaked kwargs into later events.
    spec = dict(default_check_tri_face_spec)
    spec.update(**kwargs)
    # Faces with fewer than 4 sides are triangles: queue each for removal.
    tri_faces = sheet.face_df[(sheet.face_df["num_sides"] < 4)].id
    manager.extend(
        [
            (face_elimination, {"face_id": f, "geom": spec["geom"]})
            for f in tri_faces
        ]
    )
50,
17,
50,
22,
1426782444
] |
def __init__(self):
    # Nothing to initialize: connection parameters are passed per method
    # (see Clean_Database).
    pass
31,
21,
31,
47,
1383034582
] |
def is_delete(self, tableName):
    """Return 1 when `tableName` may be truncated, 0 when it is reserved.

    Integer flags (rather than booleans) preserve the original calling
    convention: Clean_Database compares the result against 0.
    """
    # User/token/config tables must survive a database clean.
    reservedTableNameList = ["mantis_user_table", "mantis_tokens_table", "mantis_config_table"]
    # Membership test replaces the Python-2-only cmp() loop.
    return 0 if tableName in reservedTableNameList else 1
31,
21,
31,
47,
1383034582
] |
def Clean_Database(self, hostUrl, account, password, databaseName):
    # Truncate every non-reserved table in the ezScrum database, then
    # re-seed the default admin account.  (Python 2 module: print
    # statements and MySQLdb.)
    print 'clean database1'
    db = MySQLdb.connect(host=hostUrl, user=account, passwd=password, db=databaseName)
    cursor = db.cursor()
    cursor.execute("Show Tables from " + databaseName)
    result = cursor.fetchall()
    for record in result:
        tableName = record[0]
        # is_delete returns 0 for reserved tables (users/tokens/config).
        isDelete = self.is_delete(tableName)
        if isDelete == 0:
            print "Reserve " + tableName
        else :
            print "TRUNCATE TABLE `" + tableName + "`"
            cursor.execute("TRUNCATE TABLE `" + tableName + "`")
    # Recreate the built-in admin user and the system row after the wipe.
    print 'Add admin'
    cursor.execute("INSERT INTO `account` VALUES (1, 'admin', 'admin', 'example@ezScrum.tw', '21232f297a57a5a743894a0e4a801fc3', 1, 1379910191599, 1379910191599)")
    cursor.execute("INSERT INTO `system` VALUES (1, 1)")
    db.commit()
31,
21,
31,
47,
1383034582
] |
def __init__(self, name="generate"):
    """Initialize a message; "generate" means "use this host's name"."""
    self.name = socket.gethostname() if name == "generate" else name
    self.type = "gc"    # message type tag
    self.decoded = ""   # decoded payload, filled in later by set_message()
63,
35,
63,
5,
1374670088
] |
def get ( self ):
# TODO Split decoded part
message_parts = string.split ( self.decoded , ";" )
if message_parts[0] != "piratebox":
return None
b64_content_part = message_parts[4] | LibraryBox-Dev/LibraryBox-core | [
63,
35,
63,
5,
1374670088
] |
def get_sendername(self):
    # Name of the host this message was created with.
    return self.name
63,
35,
63,
5,
1374670088
] |
def set_message(self, decoded):
    # Store the already-decoded message payload for later parsing.
    self.decoded = decoded
63,
35,
63,
5,
1374670088
] |
def title(s):
    """Return `s` with every markdown heading marker ('#...# ') removed."""
    heading_marker = re.compile(r'#+ ')
    return heading_marker.sub('', s)
4,
1,
4,
8,
1380777396
] |
def depth(s):
    """Return the number of leading '#' characters (markdown heading depth)."""
    leading_hashes = re.match(r'(#*)', s).group(0)
    return len(leading_hashes)
4,
1,
4,
8,
1380777396
] |
def resolve_archive_name(repository, archive, storage_config, local_path='borg', remote_path=None):
    '''
    Given a local or remote repository path, an archive name, a storage config dict, a local Borg
    path, and a remote Borg path, simply return the archive name. But if the archive name is
    "latest", then instead introspect the repository for the latest successful (non-checkpoint)
    archive, and return its name.
    Raise ValueError if "latest" is given but there are no archives in the repository.
    '''
    if archive != "latest":
        return archive

    lock_wait = storage_config.get('lock_wait', None)

    # Verbosity flags mirror the current logging configuration.
    info_flags = ('--info',) if logger.getEffectiveLevel() == logging.INFO else ()
    debug_flags = ('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()

    # Ask Borg for the single most recent non-checkpoint archive name.
    full_command = (
        (local_path, 'list')
        + info_flags
        + debug_flags
        + make_flags('remote-path', remote_path)
        + make_flags('lock-wait', lock_wait)
        + make_flags('glob-archives', BORG_EXCLUDE_CHECKPOINTS_GLOB)
        + make_flags('last', 1)
        + ('--short', repository)
    )
    output = execute_command(full_command, output_log_level=None, borg_local_path=local_path)

    try:
        latest_archive = output.strip().splitlines()[-1]
    except IndexError:
        raise ValueError('No archives found in the repository')

    logger.debug('{}: Latest archive is {}'.format(repository, latest_archive))
    return latest_archive
1335,
77,
1335,
1,
1416363878
] |
def __init__(self, field, **kwargs):
    # Log-scale histogram variant: reuse the base processor setup, then
    # override the output key and action name.
    super().__init__(field)
    self.args.update(kwargs)
    self.output_key = 'hist_logscale'
    self.action = 'histogram_logscale'
3,
3,
3,
12,
1453294533
] |
def _add_subparser(cls, subparsers, parent):
    r"""
    Adds a specific action based sub-parser to the supplied arg_parser
    instance.
    """
    # Sub-command named after the class, with 'histlog' as a short alias.
    parser = subparsers.add_parser(cls.__name__,
                                   aliases=['histlog'],
                                   parents=[parent],
                                   help=cls.__doc__)
    #
    parser.add_argument('scale_fact', type=float, nargs='?', default=10.0,
                        help='base to generate logscale from')
    # Dispatch back to this class when the sub-command is selected.
    parser.set_defaults(func=cls)
3,
3,
3,
12,
1453294533
] |
def main():
    # Fetch autopkgtest results for a given day, format them, and push
    # them to the results repository.
    parser = argparse.ArgumentParser()
    parser.add_argument("day", help="The day of the results, with format yyyymmdd")
    args = parser.parse_args()
    install_autopkgtest_results_formatter()
    # Work in a throwaway clone under $HOME so nothing is left behind.
    with tempfile.TemporaryDirectory(dir=os.environ.get("HOME")) as temp_dir:
        clone_results_repo(temp_dir)
        format_results(temp_dir, ACTIVE_DISTROS, args.day)
        commit_and_push(temp_dir, args.day)
1092,
435,
1092,
66,
1446139395
] |
def clone_results_repo(dest_dir):
    # Clone the public results repository into dest_dir; raises
    # CalledProcessError if git fails.
    subprocess.check_call(
        ["git", "clone", "https://github.com/elopio/autopkgtest-results.git", dest_dir]
    )
1092,
435,
1092,
66,
1446139395
] |
def commit_and_push(repo_dir, day):
    # Commit everything in repo_dir and push it using a token taken from
    # the environment.
    # NOTE(review): `git config --global` mutates the invoking user's
    # global git configuration as a side effect.
    subprocess.check_call(
        ["git", "config", "--global", "user.email", "u1test+m-o@canonical.com"]
    )
    subprocess.check_call(["git", "config", "--global", "user.name", "snappy-m-o"])
    subprocess.check_call(["git", "-C", repo_dir, "add", "--all"])
    subprocess.check_call(
        [
            "git",
            "-C",
            repo_dir,
            "commit",
            "--message",
            "Add the results for {}".format(day),
        ]
    )
    # NOTE(review): if GH_TOKEN_PPA_AUTOPKGTEST_RESULTS is unset this embeds
    # the literal string "None" in the URL, and embedding the token in the
    # URL exposes it to the local process list — confirm acceptable in CI.
    subprocess.check_call(
        [
            "git",
            "-C",
            repo_dir,
            "push",
            "https://{GH_TOKEN}@github.com/elopio/autopkgtest-results.git".format(
                GH_TOKEN=os.environ.get("GH_TOKEN_PPA_AUTOPKGTEST_RESULTS")
            ),
        ]
    )
1092,
435,
1092,
66,
1446139395
] |
def __init__(self, query_params, pretty=False, **kwargs):
    # pretty: whether serialized output should be human-formatted.
    self.pretty = pretty
    self._query_params = query_params
    self._fileName = None      # None until assigned later
    self._lastModified = None  # None until assigned later
    self._extra_args = kwargs  # extra options kept for the serializer
1,
1,
1,
5,
1465204236
] |
def register(cls, tag, serializer):
    # Map a format tag (e.g. 'json') to its serializer class in the
    # class-level registry used by create().
    cls.registry[tag] = serializer
1,
1,
1,
5,
1465204236
] |
def getAllFormats(cls):
    """Return the tags of every registered serializer as a list."""
    return [tag for tag in cls.registry]
1,
1,
1,
5,
1465204236
] |
def create(cls, dformat, query_params=None, **kwargs):
    """
    A serializer factory
    """
    query_params = query_params or {}
    serializer = cls.registry.get(dformat)
    # Unknown (or falsy) format tag: fail loudly, same as before.
    if not serializer:
        raise Exception("Serializer for '%s' does not exist!" % dformat)
    return serializer(query_params, **kwargs)
1,
1,
1,
5,
1465204236
] |
def set_headers(self, response):
    # Tag the outgoing response with this serializer's MIME type.
    response.content_type = self.getMIMEType()
1,
1,
1,
5,
1465204236
] |
def __str__(self):
    """Provides string to display on front end for referral.
    For FQHC referrals, returns referral kind and date.
    For non-FQHC referrals, returns referral location and date.
    """
    formatted_date = self.written_datetime.strftime("%D")
    if self.kind.is_fqhc:
        return "%s referral on %s" % (self.kind, formatted_date)
    else:
        location_names = [loc.name for loc in self.location.all()]
        # Fix: the separator was " ," (space before the comma), which
        # rendered as "A ,B" in the UI; use the conventional ", ".
        locations = ", ".join(location_names)
        return "Referral to %s on %s" % (locations, formatted_date)
14,
9,
14,
62,
1433002833
] |
def aggregate_referral_status(referrals):
    """Summarize the status of a collection of referrals.

    Returns the 'successful' status label when every referral succeeded,
    the label of the last referral's status otherwise, and the
    NO_REFERRALS_CURRENTLY sentinel when the collection is empty.
    """
    if not referrals:
        return Referral.NO_REFERRALS_CURRENTLY

    status_labels = dict(Referral.REFERRAL_STATUSES)
    if all(r.status == Referral.STATUS_SUCCESSFUL for r in referrals):
        return status_labels[Referral.STATUS_SUCCESSFUL]
    # Otherwise report the status of the most recent referral.
    return status_labels[referrals.last().status]
14,
9,
14,
62,
1433002833
] |
def class_name(self):
    """Return the name of the instance's concrete class."""
    return type(self).__name__
14,
9,
14,
62,
1433002833
] |
def summary(self):
    # Short display summary for this item: its contact instructions.
    return self.contact_instructions
14,
9,
14,
62,
1433002833
] |
def admin_url(self):
    # Link to this FollowupRequest's change page in the Django admin.
    return reverse(
        'admin:referral_followuprequest_change',
        args=(self.id,)
    )
14,
9,
14,
62,
1433002833
] |
def upgrade():
    """Alembic upgrade step: create the `events.breaks` and
    `events.timetable_entries` tables."""
    # Break
    op.create_table(
        'breaks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(), nullable=False),
        sa.Column('description', sa.Text(), nullable=False),
        sa.Column('duration', sa.Interval(), nullable=False),
        sa.Column('text_color', sa.String(), nullable=False),
        sa.Column('background_color', sa.String(), nullable=False),
        sa.Column('room_name', sa.String(), nullable=False),
        sa.Column('inherit_location', sa.Boolean(), nullable=False),
        sa.Column('address', sa.Text(), nullable=False),
        sa.Column('venue_id', sa.Integer(), nullable=True, index=True),
        sa.Column('venue_name', sa.String(), nullable=False),
        sa.Column('room_id', sa.Integer(), nullable=True, index=True),
        # Free-text location fields are mutually exclusive with explicit
        # room/venue foreign keys.
        sa.CheckConstraint("(room_id IS NULL) OR (venue_name = '' AND room_name = '')",
                           name='no_custom_location_if_room'),
        sa.CheckConstraint("(venue_id IS NULL) OR (venue_name = '')", name='no_venue_name_if_venue_id'),
        sa.CheckConstraint("(room_id IS NULL) OR (venue_id IS NOT NULL)", name='venue_id_if_room_id'),
        sa.CheckConstraint("NOT inherit_location OR (venue_id IS NULL AND room_id IS NULL AND venue_name = '' AND "
                           "room_name = '' AND address = '')", name='inherited_location'),
        # NOTE(review): 'colors_not_empty' forces both colors non-empty,
        # which makes 'both_or_no_colors' redundant — confirm both checks
        # are intended.
        sa.CheckConstraint("(text_color = '') = (background_color = '')", name='both_or_no_colors'),
        sa.CheckConstraint("text_color != '' AND background_color != ''", name='colors_not_empty'),
        sa.ForeignKeyConstraint(['room_id'], ['roombooking.rooms.id']),
        sa.ForeignKeyConstraint(['venue_id'], ['roombooking.locations.id']),
        sa.ForeignKeyConstraint(['venue_id', 'room_id'], ['roombooking.rooms.location_id', 'roombooking.rooms.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='events'
    )
    # TimetableEntry
    op.create_table(
        'timetable_entries',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('event_id', sa.Integer(), nullable=False, index=True),
        sa.Column('parent_id', sa.Integer(), nullable=True, index=True),
        sa.Column('session_block_id', sa.Integer(), nullable=True, index=True, unique=True),
        sa.Column('contribution_id', sa.Integer(), nullable=True, index=True, unique=True),
        sa.Column('break_id', sa.Integer(), nullable=True, index=True, unique=True),
        sa.Column('type', PyIntEnum(TimetableEntryType), nullable=False),
        sa.Column('start_dt', UTCDateTime, nullable=False),
        sa.Index('ix_timetable_entries_start_dt_desc', sa.text('start_dt DESC')),
        # Exactly one of session_block / contribution / break must be set,
        # matching the `type` discriminator values 1 / 2 / 3.
        sa.CheckConstraint('type != 1 OR parent_id IS NULL', name='valid_parent'),
        sa.CheckConstraint('type != 1 OR (contribution_id IS NULL AND break_id IS NULL AND '
                           'session_block_id IS NOT NULL)', name='valid_session_block'),
        sa.CheckConstraint('type != 2 OR (session_block_id IS NULL AND break_id IS NULL AND '
                           'contribution_id IS NOT NULL)', name='valid_contribution'),
        sa.CheckConstraint('type != 3 OR (contribution_id IS NULL AND session_block_id IS NULL AND '
                           'break_id IS NOT NULL)', name='valid_break'),
        sa.ForeignKeyConstraint(['break_id'], ['events.breaks.id']),
        sa.ForeignKeyConstraint(['contribution_id'], ['events.contributions.id']),
        sa.ForeignKeyConstraint(['event_id'], ['events.events.id']),
        sa.ForeignKeyConstraint(['parent_id'], ['events.timetable_entries.id']),
        sa.ForeignKeyConstraint(['session_block_id'], ['events.session_blocks.id']),
        sa.PrimaryKeyConstraint('id'),
        schema='events'
    )
1,
1,
1,
5,
1465204236
] |
def abstract_serial_server():
    # Test fixture/factory: a fresh AbstractSerialServer per call.
    return AbstractSerialServer()
189,
68,
189,
36,
1444769179
] |
def set_print_granularity(p):
    """Set the module-wide print granularity and announce the change."""
    global print_granularity
    print_granularity = p
    announcement = "%s: print granularity = %s" % (sys.argv[0], print_granularity)
    print(announcement)
44,
17,
44,
1,
1271265059
] |
def setUp(self):
    # Build the cast used across the account API tests: an anonymous
    # client, a second unrelated user, a staff user, and the default
    # self.user.
    super().setUp()
    self.anonymous_client = APIClient()
    self.different_user = UserFactory.create(password=TEST_PASSWORD)
    self.different_client = APIClient()
    self.staff_user = UserFactory(is_staff=True, password=TEST_PASSWORD)
    self.staff_client = APIClient()
    self.user = UserFactory.create(password=TEST_PASSWORD)  # will be assigned to self.client by default
6290,
3437,
6290,
280,
1369945238
] |
def send_post(self, client, json_data, content_type='application/json', expected_status=201):
    """
    Helper method for sending a post to the server, defaulting to application/json content_type.
    Verifies the expected status and returns the response.
    """
    # pylint: disable=no-member
    payload = json.dumps(json_data)
    resp = client.post(self.url, data=payload, content_type=content_type)
    assert resp.status_code == expected_status
    return resp
6290,
3437,
6290,
280,
1369945238
] |
def post_search_api(self, client, json_data, content_type='application/json', expected_status=200):
    """
    Helper method for posting to the search API endpoint, defaulting to
    application/json content_type.  Verifies the expected status and
    returns the response.
    """
    # pylint: disable=no-member
    payload = json.dumps(json_data)
    resp = client.post(self.search_api_url, data=payload, content_type=content_type)
    assert resp.status_code == expected_status
    return resp
6290,
3437,
6290,
280,
1369945238
] |
def send_put(self, client, json_data, content_type="application/json", expected_status=204):
    """
    Helper method for sending a PUT to the server. Verifies the expected status and returns the response.
    """
    payload = json.dumps(json_data)
    resp = client.put(self.url, data=payload, content_type=content_type)
    assert resp.status_code == expected_status
    return resp
6290,
3437,
6290,
280,
1369945238
] |
def send_delete(self, client, expected_status=204):
    """
    Helper method for sending a DELETE to the server. Verifies the expected status and returns the response.
    """
    result = client.delete(self.url)
    assert result.status_code == expected_status
    return result
6290,
3437,
6290,
280,
1369945238
] |
def create_mock_verified_name(self, user):
    """
    Helper method to create an approved VerifiedName entry in name affirmation.
    """
    # Mirror the user's legacy profile name as the verified name.
    legacy_profile = UserProfile.objects.get(id=user.id)
    create_verified_name(user, self.VERIFIED_NAME, legacy_profile.name, status=VerifiedNameStatus.APPROVED)
6290,
3437,
6290,
280,
1369945238
] |
def _verify_profile_image_data(self, data, has_profile_image):
    """
    Verify the profile image data in a GET response for self.user
    corresponds to whether the user has or hasn't set a profile
    image.
    """
    # Fix: the template contained a literal "(unknown)" where the
    # {filename} placeholder belongs — `filename` is computed in both
    # branches and passed to .format() below but was never used.
    template = '{root}/{filename}_{{size}}.{extension}'
    if has_profile_image:
        url_root = 'http://example-storage.com/profile-images'
        filename = hashlib.md5(('secret' + self.user.username).encode('utf-8')).hexdigest()
        file_extension = 'jpg'
        # Cache-busting query string derived from the upload timestamp.
        template += '?v={}'.format(TEST_PROFILE_IMAGE_UPLOADED_AT.strftime("%s"))
    else:
        url_root = 'http://testserver/static'
        filename = 'default'
        file_extension = 'png'
    template = template.format(root=url_root, filename=filename, extension=file_extension)
    assert data['profile_image'] == {'has_image': has_profile_image,
                                     'image_url_full': template.format(size=50),
                                     'image_url_small': template.format(size=10)}
6290,
3437,
6290,
280,
1369945238
] |
def setUp(self):
    # Resolve the own-username endpoint once for every test in the class.
    super().setUp()
    self.url = reverse("own_username_api")
6290,
3437,
6290,
280,
1369945238
] |
def test_get_username(self):
    """
    Test that a client (logged in) can get her own username.
    """
    self.client.login(username=self.user.username, password=TEST_PASSWORD)
    # 17 appears to be the expected DB query count for the logged-in
    # request — confirm against _verify_get_own_username's signature.
    self._verify_get_own_username(17)
6290,
3437,
6290,
280,
1369945238
] |
def test_get_username_not_logged_in(self):
    """
    Test that a client (not logged in) gets a 401
    when trying to retrieve their username.
    """
    # verify that the endpoint is inaccessible when not logged in
    # (13 appears to be the expected DB query count — confirm against
    # _verify_get_own_username's signature).
    self._verify_get_own_username(13, expected_status=401)
6290,
3437,
6290,
280,
1369945238
] |