| function (string, lengths 11–56k) | repo_name (string, lengths 5–60) | features (sequence) |
|---|---|---|
def get_account_detail():
    """Fetch account details via the parameter-checked client.

    Returns a JSON string (non-ASCII preserved) describing the account.
    """
    client = check_parameters(request.json)
    detail = client.get_account_detail()
    no_db_logger.info(log_handler(detail, '获取账户信息'))
    return json.dumps(detail, ensure_ascii=False)
def get_account_cash():
    """Fetch the account's cash balance and return it as a JSON string."""
    client = check_parameters(request.json)
    cash = client.get_account_cash()
    no_db_logger.info(log_handler(cash, '获取账户现金'))
    return json.dumps(cash, ensure_ascii=False)
def get_account_portfolio():
    """Fetch the account's holdings and return them as a JSON string."""
    client = check_parameters(request.json)
    portfolio = client.get_account_portfolio()
    no_db_logger.info(log_handler(portfolio, '获取账户持仓'))
    return json.dumps(portfolio, ensure_ascii=False)
def get_list_orders():
    """Return the order list as a JSON string.

    NOTE(review): date_begin/date_end are read from the request body but
    never passed to cc.get_list_orders() — presumably the date range should
    be forwarded (or check_parameters consumes it internally); confirm
    against the check_parameters implementation.
    """
    date_begin = request.json['date_begin']
    date_end = request.json['date_end']
    cc = check_parameters(request.json)
    message = cc.get_list_orders()
    # Log text label: "fetch order list"
    logtext = log_handler(message, '获取订单列表')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False) | zznn/futu-openAPI | [
38,
18,
38,
1,
1462535088
] |
def get_list_trades():
    """Fetch the trade list and return it as a JSON string."""
    client = check_parameters(request.json)
    trades = client.get_list_trades()
    no_db_logger.info(log_handler(trades, '获取交易列表'))
    return json.dumps(trades, ensure_ascii=False)
def place_order():
    """Place an order built from the JSON request body; respond with JSON."""
    body = request.json
    # Read all required fields first (preserves KeyError-before-check order).
    code = body['code']
    quantity = body['quantity']
    price = body['price']
    side = body['side']
    ltype = body['type']
    client = check_parameters(body)
    result = client.place_order(code, quantity, price, side, ltype)
    no_db_logger.info(log_handler(result, '下单'))
    return json.dumps(result, ensure_ascii=False)
def change_order():
    """Amend an existing order's quantity/price; respond with JSON."""
    body = request.json
    order_id = body['order_id']
    quantity = body['quantity']
    price = body['price']
    client = check_parameters(body)
    result = client.change_order(order_id, quantity, price)
    no_db_logger.info(log_handler(result, '改单'))
    return json.dumps(result, ensure_ascii=False)
def cancle_order():
    """Cancel an order by id and return the result as a JSON string.

    NOTE(review): the function name is misspelled ("cancle") but is part of
    the public interface (likely bound to a route), so it must stay; the
    underlying client call is correctly spelled cancel_order.
    """
    order_id = request.json['order_id']
    cc = check_parameters(request.json)
    message = cc.cancel_order(order_id)
    # Log text label: "cancel order"
    logtext = log_handler(message, '撤单')
    no_db_logger.info(logtext)
    return json.dumps(message, ensure_ascii=False) | zznn/futu-openAPI | [
38,
18,
38,
1,
1462535088
] |
def save_token():
    """Persist (or update) a token record; respond with a result-code JSON."""
    body = request.json
    account = body['app_account']
    appid = body['appid']
    market = body['market']
    token = body['token']
    card = body['card']
    card_desc = body['text']
    outcome = save_update_token(account, appid, market, token, card, False, card_desc)
    if outcome == 'success':
        no_db_logger.info('token save success')
        return json.dumps({'result_code':0,'error_msg':''}, ensure_ascii=False)
    no_db_logger.info('token save fail')
    return json.dumps({'result_code':1,'error_msg':'token保存失败'}, ensure_ascii=False)
def delete_token():
    """Delete a stored token; respond with a result-code JSON."""
    appid = request.json['appid']
    account = request.json['app_account']
    if delete_tokens(account, appid) == 'success':
        no_db_logger.info('token delete success')
        return json.dumps({'result_code':0,'error_msg':''}, ensure_ascii=False)
    no_db_logger.info('token delete fail')
    return json.dumps({'result_code':1,'error_msg':'token删除失败'}, ensure_ascii=False)
def list_card():
    """List the account's card numbers; respond with a result-code JSON."""
    appid = request.json['appid']
    account = request.json['app_account']
    cards = list_cards(account, appid)
    message = dict(cards=cards)
    if not isinstance(cards, list):
        no_db_logger.info('list cards fail')
        return json.dumps({'result_code':1,'error_msg':'查询账户卡号失败'}, ensure_ascii=False)
    no_db_logger.info('list cards success')
    return json.dumps({'result_code':0,'error_msg':'','data':message}, ensure_ascii=False)
def test_list_extra_index(self):
  """Indexing into a non-indexable list element must raise IndexError."""
  cfg = mock_config.get_config()
  with self.assertRaises(IndexError):
    config_path.get_value('dict.list[0][0]', cfg)
def test_list_out_of_range_set(self):
  """Setting a value past the end of a list must raise IndexError."""
  cfg = mock_config.get_config()
  with self.assertRaises(IndexError):
    config_path.set_value('dict.list[2][1]', cfg, -1)
def test_reading_setting_existing_key_in_dict(self):
    """Tests setting non existing key from dict inside config.

    NOTE(review): the method name says "existing" but the path used is
    'dict.not_existing_key.key' — the docstring matches the code; the name
    appears to be a typo inherited from the original suite (public name, so
    left unchanged).
    """
    test_config = mock_config.get_config()
    with self.assertRaises(KeyError):
        config_path.set_value('dict.not_existing_key.key', test_config, 1) | google/ml_collections | [
676,
26,
676,
14,
1597899148
] |
def test_field_reference_types(self):
  """FieldReference fields should resolve to their underlying types."""
  cfg = fieldreference_config.get_config()
  paths = ['ref_nodefault', 'ref']
  expected_types = [int, int]
  actual_types = [config_path.get_type(p, cfg) for p in paths]
  self.assertEqual(expected_types, actual_types)
def test_types(self, path, path_type):
  """Parameterized check: each path resolves to the expected type."""
  cfg = mock_config.get_config()
  resolved = config_path.get_type(path, cfg)
  self.assertEqual(path_type, resolved)
def __init__(self, discovery_doc, language=None):
  """Build the Api model from a discovery document.

  Args:
    discovery_doc: (dict) The discovery document describing the API.
    language: (str) The target language name, if any.
  """
  super(Api, self).__init__(discovery_doc, self,
                            wire_name=discovery_doc['name'])
  name = self.values['name']
  self._validator.ValidateApiName(name)
  # 'freebase' is exempt from version validation (legacy special case).
  if name != 'freebase':
    self._validator.ValidateApiVersion(self.values['version'])
  canonical_name = self.values.get('canonicalName') or name
  if not self.values.get('canonicalName'):
    self.values['canonicalName'] = canonical_name
  self._class_name = self.ToClassName(canonical_name, self)
  # Guard against language implementor not taking care of spaces
  self._class_name = self._class_name.replace(' ', '')
  self._NormalizeOwnerInformation()
  self._language = language
  self._template_dir = None
  self._surface_features = {}
  self._schemas = {}           # path -> schema object cache (see DataTypeFromJson)
  self._methods_by_name = {}   # full RPC name -> Method
  self._all_methods = []
  self.SetTemplateValue('className', self._class_name)
  # Sanitized version strings for use in identifiers.
  self.SetTemplateValue('versionNoDots',
                        self.values['version'].replace('.', '_'))
  self.SetTemplateValue('versionNoDash',
                        self.values['version'].replace('-', '_'))
  self.SetTemplateValue('dataWrapper',
                        'dataWrapper' in discovery_doc.get('features', []))
  self.values.setdefault('title', name)
  self.values.setdefault('exponentialBackoffDefault', False)
  if not self.values.get('revision'):
    self.values['revision'] = 'snapshot'
  self._NormalizeUrlComponents()
  # Information for variant subtypes, a dictionary of the format:
  #
  #  { 'wireName': {'discriminant': discriminant, 'value': value,
  #                 'schema': schema},
  #    ... }
  #
  # ... where wireName is the name of variant subtypes, discriminant
  # the field name of the discriminant, value the discriminant value
  # for this variant, and schema the base schema.
  #
  # This information cannot be stored in the referred schema at
  # reading time because at the time we read it from the base
  # schema, the referenced variant schemas may not yet be loaded. So
  # we first store it here, and after all schemas have been loaded,
  # update the schema template properties.
  self._variant_info = {}
  # Build data types and methods
  self._SetupModules()
  self.void_type = data_types.Void(self)
  self._BuildSchemaDefinitions()
  self._BuildResourceDefinitions()
  self.SetTemplateValue('resources', self._resources)
  # Make data models part of the api dictionary
  self.SetTemplateValue('models', self.ModelClasses())
  # Replace methods dict with Methods
  self._top_level_methods = []
  method_dict = self.values.get('methods') or {}
  for name in sorted(method_dict):
    self._top_level_methods.append(Method(self, name, method_dict[name]))
  self.SetTemplateValue('methods', self._top_level_methods)
  # Global parameters
  self._parameters = []
  param_dict = self.values.get('parameters') or {}
  for name in sorted(param_dict):
    parameter = Parameter(self, name, param_dict[name], self)
    self._parameters.append(parameter)
    # 'alt' gets its own template slot in addition to the parameter list.
    if name == 'alt':
      self.SetTemplateValue('alt', parameter)
  self.SetTemplateValue('parameters', self._parameters)
  # Auth scopes
  self._authscopes = []
  # NOTE: iteritems() — this module targets Python 2.
  if (self.values.get('auth') and
      self.values['auth'].get('oauth2') and
      self.values['auth']['oauth2'].get('scopes')):
    for value, auth_dict in sorted(
        self.values['auth']['oauth2']['scopes'].iteritems()):
      self._authscopes.append(AuthScope(self, value, auth_dict))
    self.SetTemplateValue('authscopes', self._authscopes) | bshaffer/google-api-php-client-services | [
1,
3,
1,
1,
1456355604
] |
def all_schemas(self):
  """The dictionary of all the schema objects found in the API."""
  schema_cache = self._schemas
  return schema_cache
def _BuildResourceDefinitions(self):
  """Loop over the resources in the discovery doc and build definitions."""
  resource_dict = self.values.get('resources') or {}
  self._resources = [
      Resource(self, res_name, resource_dict[res_name], parent=self)
      for res_name in sorted(resource_dict)]
def _NormalizeOwnerInformation(self):
  """Ensure that owner and ownerDomain are set to sane values."""
  owner_domain = self.get('ownerDomain', '')
  if not owner_domain:
    # Fall back to the host of rootUrl when no explicit domain is given.
    root_url = self.get('rootUrl')
    if root_url:
      owner_domain = urlparse.urlparse(root_url).hostname
      # Normalize google domains.
      if any(owner_domain.endswith(d) for d in _RECOGNIZED_GOOGLE_DOMAINS):
        owner_domain = 'google.com'
  if owner_domain:
    owner_domain = utilities.SanitizeDomain(owner_domain)
  else:
    owner_domain = _DEFAULT_OWNER_DOMAIN
  self.SetTemplateValue('ownerDomain', owner_domain)
  if not self.get('ownerName'):
    if owner_domain == _DEFAULT_OWNER_DOMAIN:
      owner_name = _DEFAULT_OWNER_NAME
    else:
      # Derive an identifier-friendly name from the domain.
      owner_name = owner_domain.replace('.', '_')
    self.SetTemplateValue('ownerName', owner_name)
  if not self.get('owner'):
    self.SetTemplateValue('owner', self['ownerName'].lower()) | bshaffer/google-api-php-client-services | [
1,
3,
1,
1,
1456355604
] |
def ModelClasses(self):
  """Return all the model classes, sorted by class name."""
  model_types = (Schema, data_types.MapDataType)
  # NOTE: itervalues() — this module targets Python 2.
  unique_models = {s for s in self._schemas.itervalues()
                   if isinstance(s, model_types)}
  return sorted(unique_models, key=operator.attrgetter('class_name'))
def DataTypeFromJson(self, type_dict, default_name, parent=None,
                     wire_name=None):
  """Returns a schema object represented by a JSON Schema dictionary.

  Evaluate a JSON schema dictionary and return an appropriate schema object.
  If a data type is defined in-line, then create the schema dynamically. If
  the schema is a $ref to another, return the previously created schema or
  a lazy reference.

  If the type_dict is None, a blank schema will be created.

  Args:
    type_dict: A dict of the form expected of a request or response member
      of a method description. See the Discovery specification for more.
    default_name: The unique name to give the schema if we have to create it.
    parent: The schema where I was referenced. If we cannot determine that
      this is a top level schema, set the parent to this.
    wire_name: The name which will identify objects of this type in data on
      the wire.
  Returns:
    A Schema object.
  """
  # new or not initialized, create a fresh one
  schema = Schema.Create(self, default_name, type_dict or {}, wire_name,
                         parent)
  # Only put it in our by-name list if it is a real object
  if isinstance(schema, Schema) or isinstance(schema, data_types.MapDataType):
    # Use the path to the schema as a key. This means that an anonymous class
    # for the 'person' property under the schema 'Activity' will have the
    # unique name 'Activity.person', rather than 'ActivityPerson'.
    path = '.'.join(
        [a.values.get('wireName', '<anon>') for a in schema.full_path])
    _LOGGER.debug('DataTypeFromJson: add %s to cache', path)
    self._schemas[path] = schema
  return schema | bshaffer/google-api-php-client-services | [
1,
3,
1,
1,
1456355604
] |
def MethodByName(self, method_name):
  """Find a method by name.

  Args:
    method_name: (str) the full RPC name of a method defined by this API.

  Returns:
    Method object, or None when no method has that name.
  """
  registry = self._methods_by_name
  return registry.get(method_name)
def SetVariantInfo(self, ref, discriminant, value, schema):
  """Sets variant info for the given reference."""
  if ref in self._variant_info:
    # NOTE(review): self._base_type is not initialized anywhere visible in
    # this file (Api.__init__ only creates _variant_info), so this warning
    # path would raise AttributeError if taken — confirm where _base_type
    # is supposed to be set.
    logging.warning("Base type of '%s' changed from '%s' to '%s'. "
                    "This is an indication that a variant schema is used "
                    "from multiple base schemas and may result in an "
                    "inconsistent model.",
                    ref, self._base_type[ref].wireName, schema.wireName)
  self._variant_info[ref] = {'discriminant': discriminant, 'value': value,
                             'schema': schema} | bshaffer/google-api-php-client-services | [
1,
3,
1,
1,
1456355604
] |
def _VisitMethod(self, method, func):
  """Visit a method, calling a function on every child.

  Args:
    method: (Method) The Method to visit.
    func: (function) Method to call on each object.
  """
  func(method)
  for param in method.parameters:
    func(param)
def _VisitSchema(self, schema, func):
  """Visit a schema tree, calling a function on every child.

  Args:
    schema: (Schema) The Schema to visit.
    func: (function) Method to call on each object.
  """
  func(schema)
  func(schema.module)
  for prop in schema.values.get('properties', []):
    func(prop)
  # NOTE(review): this iterates self.children, not schema.children — it
  # visits this object's own children while claiming to walk the schema
  # tree; confirm whether schema.children was intended.
  for child in self.children:
    func(child) | bshaffer/google-api-php-client-services | [
1,
3,
1,
1,
1456355604
] |
def ToClassName(self, s, element, element_type=None):
  """Convert a name to a suitable class name in the target language.

  This default implementation camel cases the string, which is appropriate
  for some languages. Subclasses are encouraged to override this.

  Args:
    s: (str) A rosy name of data element.
    element: (object) The object we are making a class name for.
    element_type: (str) Deprecated. The kind of object we are making a class
      name for. E.g. resource, method, schema.
      TODO(user): replace type in favor of class of element, but that will
      require changing the place where we call ToClassName with no element.

  Returns:
    A name suitable for use as a class in the generator's target language.
  """
  camel_cased = utilities.CamelCase(s)
  return camel_cased.replace(' ', '')
def class_name(self):
  """The generated class name for this API."""
  template_values = self.values
  return template_values['className']
def model_module(self):
  """The module that holds this API's data models."""
  module = self._model_module
  return module
def containing_module(self):
  """The module which contains this API."""
  module = self._containing_module
  return module
def all_methods(self):
  """All the methods in the entire API."""
  methods = self._all_methods
  return methods
def top_level_methods(self):
  """All the methods at the API top level (not in a resource)."""
  methods = self._top_level_methods
  return methods
def __init__(self, api, name, def_dict, parent=None):
  """Creates a Resource.

  Args:
    api: (Api) The Api which owns this Resource.
    name: (string) The discovery name of the Resource.
    def_dict: (dict) The discovery dictionary for this Resource.
    parent: (CodeObject) The resource containing this method, if any. Top
      level resources have the API as a parent.
  """
  super(Resource, self).__init__(def_dict, api, parent=parent, wire_name=name)
  self.ValidateName(name)
  class_name = api.ToClassName(name, self, element_type='resource')
  self.SetTemplateValue('className', class_name)
  # Replace methods dict with Methods
  # (the loop variable shadows the 'name' parameter, which is no longer
  # needed at this point)
  self._methods = []
  method_dict = self.values.get('methods') or {}
  for name in sorted(method_dict):
    self._methods.append(Method(api, name, method_dict[name], parent=self))
  self.SetTemplateValue('methods', self._methods)
  # Get sub resources (recursively builds the nested Resource tree)
  self._resources = []
  r_def_dict = self.values.get('resources') or {}
  for name in sorted(r_def_dict):
    r = Resource(api, name, r_def_dict[name], parent=self)
    self._resources.append(r)
  self.SetTemplateValue('resources', self._resources) | bshaffer/google-api-php-client-services | [
1,
3,
1,
1,
1456355604
] |
def methods(self):
  """The list of Method objects belonging to this resource."""
  method_list = self._methods
  return method_list
def methods_dict(self):
  """Map each method's wireName to the method object."""
  by_wire_name = {}
  for method in self._methods:
    by_wire_name[method['wireName']] = method
  return by_wire_name
def __init__(self, api, value, def_dict):
  """Construct an auth scope.

  Args:
    api: (Api) The Api which owns this Property
    value: (string) The unique identifier of this scope, often a URL
    def_dict: (dict) The discovery dictionary for this auth scope.
  """
  super(AuthScope, self).__init__(def_dict, api, wire_name=value)
  self._module = api.module
  self.SetTemplateValue('value', value)
  # Strip trailing slashes before deriving the derived names below.
  while value.endswith('/'):
    value = value[:-1]
  if 'description' not in self.values:
    self.SetTemplateValue('description', value)
  # Strip the common prefix to get a unique identifying name
  if value.startswith(AuthScope.GOOGLE_PREFIX):
    scope_id = value[len(AuthScope.GOOGLE_PREFIX):]
  elif value.startswith(AuthScope.HTTPS_PREFIX):
    # Some common scopes are just a URL.
    scope_id = value[len(AuthScope.HTTPS_PREFIX):]
  else:
    scope_id = value
  # We preserve the value stripped of the most common prefixes so we can
  # use it for building constantName in templates.
  self.SetTemplateValue('lastPart', scope_id)
  # replace all non alphanumeric with '_' to form 'name'
  name = ''.join([(c if c.isalnum() else '_') for c in scope_id.upper()])
  self.SetTemplateValue('name', name) | bshaffer/google-api-php-client-services | [
1,
3,
1,
1,
1456355604
] |
def constantName(self):  # pylint: disable=g-bad-name
  """Overrides default behavior of constantName."""
  last_part = self.values['lastPart']
  return self._language_model.ApplyPolicy('constant', self, last_part)
def __init__(self, api, name, def_dict, parent=None):
  """Construct a method.

  Methods in REST discovery are inside of a resource. Note that the method
  name and id are calculable from each other. id will always be equal to
  api_name.resource_name[.sub_resource...].method_name. At least it should
  be, as that is the transformation Discovery makes from the API definition,
  which is essentially a flat list of methods, into a hierarchy of resources.

  Args:
    api: (Api) The Api which owns this Method.
    name: (string) The discovery name of the Method.
    def_dict: (dict) The discovery dictionary for this Method.
    parent: (CodeObject) The resource containing this Method, if any.

  Raises:
    ApiException: If the httpMethod type is not one we know how to
      handle.
  """
  super(Method, self).__init__(def_dict, api, parent=(parent or api))
  # TODO(user): Fix java templates to name vs. wireName correctly. Then
  # change the __init__ to have wire_name=def_dict.get('id') or name
  # then eliminate this line.
  self.SetTemplateValue('wireName', name)
  self.ValidateName(name)
  class_name = api.ToClassName(name, self, element_type='method')
  if parent and class_name == parent.values['className']:
    # Some languages complain when the collection name is the same as the
    # method name.
    class_name = '%sRequest' % class_name
  # The name is the key of the dict defining use. The id field is what you
  # have to use to call the method via RPC. That is unique, name might not be.
  self.SetTemplateValue('name', name)
  # Fix up very old discovery, which does not have an id.
  if 'id' not in self.values:
    self.values['id'] = name
  self.SetTemplateValue('className', class_name)
  http_method = def_dict.get('httpMethod', 'POST').upper()
  self.SetTemplateValue('httpMethod', http_method)
  self.SetTemplateValue('rpcMethod',
                        def_dict.get('rpcMethod') or def_dict['id'])
  rest_path = def_dict.get('path') or def_dict.get('restPath')
  # TODO(user): if rest_path is not set, raise a good error and fail fast.
  self.SetTemplateValue('restPath', rest_path)
  # Figure out the input and output types and schemas for this method.
  expected_request = self.values.get('request')
  if expected_request:
    # TODO(user): RequestBody is only used if the schema is anonymous.
    # When we go to nested models, this could be a nested class off the
    # Method, making it unique without the silly name. Same for ResponseBody.
    request_schema = api.DataTypeFromJson(expected_request,
                                          '%sRequestContent' % name,
                                          parent=self)
    self.SetTemplateValue('requestType', request_schema)
  expected_response = def_dict.get('response') or def_dict.get('returns')
  if expected_response:
    response_schema = api.DataTypeFromJson(expected_response,
                                           '%sResponse' % name,
                                           parent=self)
    # Link 'get' responses back to the resource they came from.
    if self.values['wireName'] == 'get':
      response_schema.values['associatedResource'] = parent
    self.SetTemplateValue('responseType', response_schema)
  else:
    self.SetTemplateValue('responseType', api.void_type)
  # Make sure we can handle this method type and do any fixups.
  if http_method not in ['DELETE', 'GET', 'OPTIONS', 'PATCH', 'POST', 'PUT',
                         'PROPFIND', 'PROPPATCH', 'REPORT']:
    raise ApiException('Unknown HTTP method: %s' % http_method, def_dict)
  if http_method == 'GET':
    self.SetTemplateValue('requestType', None)
  # Replace parameters dict with Parameters. We try to order them by their
  # position in the request path so that the generated code can track the
  # more human readable definition, rather than the order of the parameters
  # in the discovery doc.
  order = self.values.get('parameterOrder', [])
  req_parameters = []
  opt_parameters = []
  # NOTE: iteritems()/cmp below — this module targets Python 2.
  for name, def_dict in self.values.get('parameters', {}).iteritems():
    param = Parameter(api, name, def_dict, self)
    if name == 'alt':
      # Treat the alt parameter differently
      self.SetTemplateValue('alt', param)
      continue
    # Standard params are part of the generic request class
    # We want to push all parameters that aren't declared inside
    # parameterOrder after those that are.
    if param.values['wireName'] in order:
      req_parameters.append(param)
    else:
      # optional parameters are appended in the order they're declared.
      opt_parameters.append(param)
  # pylint: disable=g-long-lambda
  req_parameters.sort(lambda x, y: cmp(order.index(x.values['wireName']),
                                       order.index(y.values['wireName'])))
  # sort optional parameters by name to avoid code churn
  opt_parameters.sort(lambda x, y: cmp(x.values['wireName'], y.values['wireName']))
  req_parameters.extend(opt_parameters)
  self.SetTemplateValue('parameters', req_parameters)
  self._InitMediaUpload(parent)
  self._InitPageable(api)
  api.AddMethod(self) | bshaffer/google-api-php-client-services | [
1,
3,
1,
1,
1456355604
] |
def _InitPageable(self, api):
  """Detect paging support and set the isPageable template values.

  A method is pageable when its response carries a next-page token and a
  matching page token exists either in its optional parameters or as a
  field of its request body.
  """
  response_type = self.values.get('responseType')
  if response_type == api.void_type:
    return
  next_page_token_name = self.FindPageToken(
      response_type.values.get('properties'))
  if not next_page_token_name:
    return
  is_page_token_parameter = True
  page_token_name = self.FindPageToken(self.optional_parameters)
  if not page_token_name:
    # page token may be field of request body instead of query parameter
    is_page_token_parameter = False
    request_type = self.values.get('requestType')
    if request_type:
      page_token_name = self.FindPageToken(
          request_type.values.get('properties'))
    if not page_token_name:
      return
  self.SetTemplateValue('isPageable', True)
  # "Standard" paging means the canonical pageToken/nextPageToken query
  # parameter pair.
  self.SetTemplateValue('isPagingStyleStandard',
                        (is_page_token_parameter and
                         page_token_name == 'pageToken' and
                         next_page_token_name == 'nextPageToken')) | bshaffer/google-api-php-client-services | [
1,
3,
1,
1,
1456355604
] |
def media_upload_parameters(self):
  """The 'mediaUpload' section of the discovery doc, or None."""
  template_values = self.values
  return template_values.get('mediaUpload')
def parameters(self):
  """All Parameter objects of this method."""
  template_values = self.values
  return template_values['parameters']
def optional_parameters(self):
  """The method's parameters that are not marked required."""
  result = []
  for param in self.values['parameters']:
    if not param.required:
      result.append(param)
  return result
def required_parameters(self):
  """The method's parameters that are marked required."""
  result = []
  for param in self.values['parameters']:
    if param.required:
      result.append(param)
  return result
def path_parameters(self):
  """Parameters whose location is the request path."""
  return [param for param in self.values['parameters']
          if param.location == 'path']
def query_parameters(self):
  """Parameters whose location is the query string."""
  return [param for param in self.values['parameters']
          if param.location == 'query']
def FindCodeObjectWithWireName(things, wire_name):
  """Looks for an element having the given wire_name.

  Args:
    things: (array of DataType) List of parameters or properties to search.
    wire_name: (str) The wireName we are looking to find.

  Returns:
    None or element with the given wire_name.
  """
  if not things:
    return None
  return next((e for e in things if e.values['wireName'] == wire_name), None)
def FindPageToken(things):
  """Looks for an element with a wireName like a page token.

  Args:
    things: (array of DataType) List of parameters or properties to search.

  Returns:
    None or page token name found.
  """
  return next(
      (token_name for token_name in _PAGE_TOKEN_NAMES
       if Method.FindCodeObjectWithWireName(things, token_name)),
      None)
def optionalParameters(self):  # pylint: disable=g-bad-name
  """Template-facing alias for optional_parameters."""
  params = self.optional_parameters
  return params
def pathParameters(self):  # pylint: disable=g-bad-name
  """Template-facing alias for path_parameters."""
  params = self.path_parameters
  return params
def __init__(self, api, name, def_dict, method):
  """Construct a Parameter from its discovery dictionary.

  Args:
    api: (Api) The Api which owns this Parameter.
    name: (string) The discovery name of the Parameter.
    def_dict: (dict) The discovery dictionary for this Parameter.
    method: (Method) The method this parameter belongs to.
  """
  super(Parameter, self).__init__(def_dict, api, parent=method,
                                  wire_name=name)
  self.ValidateName(name)
  # NOTE(review): assigns the api object to self.schema — looks odd for an
  # attribute named 'schema'; confirm downstream users' expectations.
  self.schema = api
  # TODO(user): Deal with dots in names better. What we should do is:
  # For x.y, x.z create a little class X, with members y and z. Then
  # have the constructor method take an X.
  self._repeated = self.values.get('repeated', False)
  self._required = self.values.get('required', False)
  # Location defaults to 'query' when neither modern nor legacy key is set.
  self._location = (self.values.get('location')
                    or self.values.get('restParameterType')
                    or 'query')
  # TODO(user): Why not just use Schema.Create here?
  referenced_schema = self.values.get('$ref')
  if referenced_schema:
    self._data_type = (api.SchemaByName(referenced_schema) or
                       data_types.SchemaReference(referenced_schema, api))
  elif def_dict.get('type') == 'array':
    self._data_type = Schema.Create(api, name, def_dict, name, method)
  elif self.values.get('enum'):
    self._data_type = data_types.Enum(def_dict,
                                      api,
                                      name,
                                      self.values.get('enum'),
                                      self.values.get('enumDescriptions'),
                                      parent=method)
    self.SetTemplateValue('enumType', self._data_type)
  else:
    self._data_type = data_types.PrimitiveDataType(def_dict, api, parent=self)
  if self._repeated:
    # Repeated parameters are wrapped in an array of the base type.
    self._data_type = data_types.ArrayDataType(name, self._data_type,
                                               parent=self) | bshaffer/google-api-php-client-services | [
1,
3,
1,
1,
1456355604
] |
def repeated(self):
  """Whether this parameter may appear multiple times."""
  flag = self._repeated
  return flag
def required(self):
  """Whether this parameter must be supplied."""
  flag = self._required
  return flag
def location(self):
  """Where the parameter is carried: 'path' or 'query'."""
  loc = self._location
  return loc
def code_type(self):
  """The target-language type of this parameter's data type."""
  data_type = self._data_type
  return data_type.code_type
def get_list_definition(self, account_config: AccountConfig, destination_metadata: List[str]) -> Dict[str, Any]:
  # NOTE(review): this definition is truncated in this chunk — only the first
  # two assignments are visible; the construction and return of the
  # list-definition dict promised by the annotation is missing from view.
  list_name = destination_metadata[0]  # first metadata entry is the list name
  app_id = account_config.app_id | google/megalista | [
76,
48,
76,
10,
1615325858
] |
def get_row_keys(self) -> List[str]:
  """The column names that identify a row for this destination."""
  return ['mobile_id']
def test_dummy_request():
    """NoEmulator should unwrap single-element text lists and default time."""
    from rasa.nlu.emulators.no_emulator import NoEmulator

    emulator = NoEmulator()
    assert emulator.normalise_request_json({"text": ["arb text"]}) == {
        "text": "arb text",
        "time": None,
    }
    assert emulator.normalise_request_json(
        {"text": ["arb text"], "time": "1499279161658"}
    ) == {"text": "arb text", "time": "1499279161658"}
def _map(f, *xs):
  """Like built-in map over the given iterables, but eagerly as a tuple."""
  mapped = map(f, *xs)
  return tuple(mapped)
def _avals_to_results_handler(nrep, npart, partitions, out_avals):
  """Build a function that converts per-output buffer lists into results."""
  per_output_handlers = [
      _aval_to_result_handler(npart, parts, out_aval)
      for parts, out_aval in safe_zip(partitions, out_avals)]

  def handler(out_bufs):
    return [h(bufs) for h, bufs in zip(per_output_handlers, out_bufs)]

  return handler
def _sharded_callable(
    fun: lu.WrappedFun, nparts: Optional[int],
    in_parts: Tuple[pxla.PartitionsOrReplicated, ...],
    out_parts_thunk: Callable[[], Tuple[pxla.PartitionsOrReplicated, ...]],
    local_in_parts: Optional[Tuple[pxla.PartitionsOrReplicated, ...]],
    local_out_parts_thunk: Callable[[], Optional[Tuple[pxla.PartitionsOrReplicated, ...]]],
    local_nparts: Optional[int], name: str, *abstract_args):
  """Trace, partition and compile `fun`; return a callable executing it.

  Traces `fun` to a jaxpr with global avals, validates the partition counts
  against the available devices (TPU/GPU only), builds a sharded XLA
  computation, compiles it, and returns a partial of
  `_execute_spatially_partitioned` with the argument/result handlers bound.
  """
  nrep = 1
  if local_in_parts is None:
    local_in_parts = in_parts
  global_abstract_args = [pxla.get_global_aval(arg, parts, lparts)
                          for arg, parts, lparts
                          in safe_zip(abstract_args, in_parts, local_in_parts)]
  if logging.vlog_is_on(2):
    logging.vlog(2, "abstract_args: %s", abstract_args)
    logging.vlog(2, "global_abstract_args: %s", global_abstract_args)
    logging.vlog(2, "in_parts: %s", in_parts)
    logging.vlog(2, "local_in_parts: %s", local_in_parts)
  jaxpr, global_out_avals, consts = pe.trace_to_jaxpr_final(fun, global_abstract_args)
  platform = xb.get_backend().platform
  if platform not in ["tpu", "gpu"]:
    # TODO(skye): fall back to regular jit?
    raise ValueError(f"sharded_jit not supported for {platform}")
  nparts = pxla.reconcile_num_partitions(jaxpr, nparts)
  assert nparts is not None
  if nparts > xb.device_count():
    raise ValueError(
        f"sharded_jit computation requires {nparts} devices, "
        f"but only {xb.device_count()} devices are available.")
  if xb.local_device_count() < nparts < xb.device_count():
    raise NotImplementedError(
        f"sharded_jit across multiple hosts must use all available devices. "
        f"Got {nparts} out of {xb.device_count()} requested devices "
        f"(local device count: {xb.local_device_count()})")
  if local_nparts is None:
    if nparts > xb.local_device_count():
      raise ValueError(
          "Specify 'local_nparts' when using cross-process sharded_jit "
          "and all inputs and outputs are replicated.")
    else:
      local_nparts = nparts
  if local_nparts > xb.local_device_count():
    raise ValueError(
        f"sharded_jit computation requires {local_nparts} local devices, "
        f"but only {xb.local_device_count()} local devices are available.")
  if logging.vlog_is_on(2):
    logging.vlog(2, "nparts: %d local_nparts: %d", nparts, local_nparts)
  out_parts = out_parts_thunk()
  local_out_parts = local_out_parts_thunk()
  if local_out_parts is None:
    local_out_parts = out_parts
  if logging.vlog_is_on(2):
    logging.vlog(2, "out_parts: %s", out_parts)
    logging.vlog(2, "local_out_parts: %s", local_out_parts)
  local_out_avals = [pxla.get_local_aval(out, parts, lparts)
                     for out, parts, lparts
                     in safe_zip(global_out_avals, out_parts, local_out_parts)]
  log_priority = logging.WARNING if config.jax_log_compiles else logging.DEBUG
  logging.log(log_priority,
              "Compiling %s for %d devices with args %s.",
              fun.__name__, nparts, global_abstract_args)
  # Lower the jaxpr to an XLA computation with shardings attached.
  c = xc.XlaBuilder("spjit_{}".format(fun.__name__))
  xla_consts = _map(partial(xla.pyval_to_ir_constant, c), consts)
  xla_args = _xla_sharded_args(c, global_abstract_args, in_parts)
  axis_env = xla.AxisEnv(nrep, (), ())
  ctx = xla.TranslationContext(
      c, platform, axis_env, new_name_stack(wrap_name(name, "sharded_jit")))
  out_nodes = xla.jaxpr_subcomp(ctx, jaxpr, xla_consts, *xla_args)
  out_tuple = xla.with_sharding(c, out_parts, xops.Tuple, c, out_nodes)
  built = c.Build(out_tuple)
  if nparts <= xb.local_device_count():
    devices = xb.local_devices()[:nparts]
  else:
    assert nparts == xb.device_count()
    devices = xb.devices()
  device_assignment = np.array([[d for d in devices]])
  device_assignment = np.reshape(device_assignment, (-1, nparts))
  # device_assignment = None  # TODO(skye): replace with default device assignment?
  compiled = dispatch.backend_compile(
      xb.get_backend(), built,
      xb.get_compile_options(nrep, nparts, device_assignment))
  # Build the input sharding specs / indices and the result handler.
  input_specs = [
      pxla.partitioned_sharding_spec(local_nparts, parts, aval)
      for parts, aval in zip(local_in_parts, abstract_args)]
  input_indices = [pxla.spec_to_indices(aval.shape, spec)
                   if spec is not None else None
                   for aval, spec in zip(abstract_args, input_specs)]
  handle_args = partial(pxla.shard_args, compiled.local_devices(),
                        input_indices)
  handle_outs = _avals_to_results_handler(nrep, local_nparts,  # type: ignore
                                          local_out_parts, local_out_avals)
  return partial(_execute_spatially_partitioned, compiled, handle_args,
                 handle_outs) | google/jax | [
22193,
2080,
22193,
1296,
1540502702
] |
def _sharded_jit_lowering(ctx, *in_nodes,
                          in_parts, out_parts_thunk, nparts,
                          name, call_jaxpr, local_in_parts,
                          local_out_parts_thunk, local_nparts):
  """Lower a sharded_jit call to MLIR.

  Wraps each input and output in a sharding op when a partitioning is
  specified for it, lowers ``call_jaxpr`` to its own function, and emits a
  call to that function.
  """
  def _maybe_shard(values, sharding):
    # Values without a requested partitioning pass through untouched.
    if sharding is None:
      return values
    proto = xla.sharding_to_proto(sharding)
    return [mlir.wrap_with_sharding_op(v, proto) for v in values]

  # Any extra leading in_nodes are assumed to be constants and replicated.
  num_extra_nodes = len(in_nodes) - len(in_parts)
  assert num_extra_nodes >= 0
  in_parts = (None,) * num_extra_nodes + in_parts

  args = [_maybe_shard(ns, sharding)
          for ns, sharding in safe_zip(
              safe_map(mlir.wrap_singleton_ir_values, in_nodes), in_parts)]

  sub_ctx = ctx.module_context.replace(
      name_stack=new_name_stack(wrap_name(name, "sharded_jit")))
  fn = mlir.lower_jaxpr_to_fun(sub_ctx, f"sharded_jit_{name}",
                               core.ClosedJaxpr(call_jaxpr, ()))

  output_types = safe_map(mlir.aval_to_ir_types, ctx.avals_out)
  flat_output_types = util.flatten(output_types)
  call = func_dialect.CallOp(flat_output_types,
                             ir.FlatSymbolRefAttr.get(fn.name.value),
                             mlir.flatten_lowering_ir_args(args))
  out_nodes = util.unflatten(call.results, safe_map(len, output_types))
  return [_maybe_shard(ns, sharding)
          for ns, sharding in safe_zip(out_nodes, out_parts_thunk())]
22193,
2080,
22193,
1296,
1540502702
] |
def _xla_sharded_args(c, avals, in_parts):
  """Create one sharded XLA parameter per aval, in order."""
  return [
      xla.with_sharding(c, parts, xla.parameter, c, i,
                        *xla.aval_to_xla_shapes(aval))
      for i, (parts, aval) in enumerate(safe_zip(in_parts, avals))
  ]
22193,
2080,
22193,
1296,
1540502702
] |
def __str__(self):
    """Render the unconstrained-partition sentinel as a readable token."""
    return "UNCONSTRAINED"
22193,
2080,
22193,
1296,
1540502702
] |
def __new__(cls, *partitions):
    """Build the spec as a plain tuple of the given partition entries.

    NOTE(review): the original deliberately passes ``PartitionSpec`` rather
    than ``cls`` to ``tuple.__new__``, so subclasses would still construct a
    PartitionSpec here — preserved as-is.
    """
    return tuple.__new__(PartitionSpec, partitions)
22193,
2080,
22193,
1296,
1540502702
] |
def sharded_jit(
fun: Callable,
in_parts,
out_parts,
num_partitions: Optional[int] = None,
local_in_parts=None,
local_out_parts=None,
local_num_partitions=None,
static_argnums: Union[int, Iterable[int]] = (), | google/jax | [
22193,
2080,
22193,
1296,
1540502702
] |
def wrapped(*args, **kwargs):
    """Trace ``fun`` with flattened args and dispatch via ``sharded_call``.

    Closes over ``fun``, ``static_argnums``, ``in_parts``/``out_parts`` and
    their local variants from the enclosing ``sharded_jit`` call.
    """
    if kwargs:
      raise NotImplementedError("sharded_jit over kwargs not yet supported")

    f = lu.wrap_init(fun)
    if static_argnums:
      if max(static_argnums) >= len(args):
        raise ValueError(
            f"jitted function has static_argnums={static_argnums}"
            f" but was called with only {len(args)} positional "
            f"argument{'s' if len(args) > 1 else ''}. "
            "All static broadcasted arguments must be passed positionally.")
      # Keep only the dynamic (non-static) arguments for tracing.
      dyn_argnums = [i for i in range(len(args)) if i not in static_argnums]
      f, args = argnums_partial(f, dyn_argnums, args)

    args_flat, in_tree = tree_flatten((args, kwargs))
    in_parts_flat = tuple(flatten_axes("sharded_jit in_parts",
                                       in_tree.children()[0], in_parts))
    if local_in_parts is not None:
      local_in_parts_flat = tuple(flatten_axes("sharded_jit local_in_parts",
                                               in_tree.children()[0], local_in_parts))
    else:
      local_in_parts_flat = None
    flat_fun, out_tree = flatten_fun(f, in_tree)
    # out_tree() is only populated after tracing, so the out-parts lookups are
    # deferred inside thunks rather than evaluated here.
    # TODO(skye): having a function-typed param in a primitive seems dicey, is
    # there a better way?
    out_parts_thunk = HashableFunction(
        lambda: tuple(flatten_axes("sharded_jit out_parts", out_tree(), out_parts)),
        closure=out_parts)
    if local_out_parts:
      local_out_parts_thunk = HashableFunction(
          lambda: tuple(flatten_axes("sharded_jit local_out_parts",
                                     out_tree(), local_out_parts)),
          closure=local_out_parts)
    else:
      local_out_parts_thunk = HashableFunction(lambda: None, closure=None)

    out = sharded_call(
        flat_fun,
        *args_flat,
        nparts=nparts,
        in_parts=in_parts_flat,
        out_parts_thunk=out_parts_thunk,
        local_in_parts=local_in_parts_flat,
        local_out_parts_thunk=local_out_parts_thunk,
        local_nparts=local_nparts,
        name=flat_fun.__name__)
    return tree_unflatten(out_tree(), out)
22193,
2080,
22193,
1296,
1540502702
] |
def _sharding_constraint_impl(x, partitions):
  """Eager-evaluation impl of the sharding-constraint primitive.

  The constraint is only meaningful while staging under ``sharded_jit``, so
  evaluating it directly is always an error.
  """
  # TODO(skye): can we also prevent this from being called in other
  # non-sharded_jit contexts? (e.g. pmap, control flow)
  raise NotImplementedError(
      "with_sharding_constraint() should only be called inside sharded_jit()")
22193,
2080,
22193,
1296,
1540502702
] |
def _sharding_constraint_lowering(ctx, x_node, partitions):
  """Lower a sharding constraint by wrapping ``x_node`` in a sharding op."""
  proto = xla.sharding_to_proto(partitions)
  return [mlir.wrap_with_sharding_op(x_node, proto)]
22193,
2080,
22193,
1296,
1540502702
] |
def get_main_page(self):
        """Return the static HTML landing page of the OpenFDA client.

        The page offers four GET forms targeting the handler paths
        'receivedrug', 'searchmed', 'receivecompany' and 'searchcom'.
        """
        html = '''
        <html>
        <head>
        <title>OpenFDA app</title>
        </head>
        <body>
        <h1>OpenFDA Client</h1>
        <form method='get' action='receivedrug'>
        <input type='submit' value='Enviar a OpenFDA'>
        </input>
        </form>
        <form method='get' action='searchmed'>
        <input type='text' name='drug'></input>
        <input type='submit' value='Buscar Medicamento'></input>
        </form>
        <form method='get' action='receivecompany'>
        <input type='submit' value='Find companies'></input>
        </form>
        <form method='get' action='searchcom'>
        <input type='text' name='drug'></input>
        <input type='submit' value='Buscar medicinalproduct'></input>
        </form>
        </body>
        </html>
        '''
        return html
6,
22,
6,
2,
1489416204
] |
def get_medicinalproduct(self, com_num):
        """Fetch up to 10 OpenFDA events matching one company number.

        Args:
            com_num: companynumb value to search for.

        Returns:
            The decoded JSON payload returned by the event endpoint.
        """
        connection = http.client.HTTPSConnection(self.OPENFDA_API_URL)
        query = '?search=companynumb:' + com_num + '&limit=10'
        connection.request("GET", self.OPENFDA_API_EVENT + query)
        response = connection.getresponse()
        # Keep the original status trace for debugging.
        print(response.status, response.reason)
        payload = response.read().decode('utf8')
        return json.loads(payload)
6,
22,
6,
2,
1489416204
] |
def get_drug(self, events):
        """Return the first drug's medicinalproduct for each event result."""
        return [event['patient']['drug'][0]['medicinalproduct']
                for event in events['results']]
6,
22,
6,
2,
1489416204
] |
def drug_page(self, medicamentos):
        """Render the given drug names as an HTML bullet list."""
        items = ''.join('<li>' + drug + '</li>' for drug in medicamentos)
        html = '''
        <html>
        <head></head>
        <body>
        <ul>
        %s
        </ul>
        </body>
        </html>''' % (items)
        return html
6,
22,
6,
2,
1489416204
] |
def __init__(self, alarm_mgr, intf_id, port_type_name):
        """OLT loss-of-signal (LOS) alarm.

        Args:
            alarm_mgr: alarm manager handed through to the base alarm class.
            intf_id: interface id the alarm is associated with.
            port_type_name: port type string (NNI or PON) the LOS applies to.
        """
        # Fixed identity/severity for this alarm type: a MAJOR communication
        # alarm on the OLT category.
        super(OltLosAlarm, self).__init__(alarm_mgr, object_type='olt LOS',
                                          alarm='OLT_LOS',
                                          alarm_category=AlarmEventCategory.OLT,
                                          alarm_type=AlarmEventType.COMMUNICATION,
                                          alarm_severity=AlarmEventSeverity.MAJOR)
        # Added port type to indicate if alarm was on NNI or PON
        self._intf_id = intf_id
        self._port_type_name = port_type_name
73,
117,
73,
17,
1484694318
] |
def __init__(self, frame, model):
        """Bind the frame and document model; the UI is attached later via set_ui."""
        self.frame = frame
        self.model = model
        self.ui = None
42,
7,
42,
9,
1327506446
] |
def set_ui(self, ui):
        """Attach the UI object this controller drives."""
        self.ui = ui
42,
7,
42,
9,
1327506446
] |
def get_imple_name(self):
        """Delegate to the pages component for the implementation name."""
        return self.ui.pages.get_imple_name()
42,
7,
42,
9,
1327506446
] |
def getTitle(self):
        """Return the title of the underlying frame."""
        return self.frame.getTitle()
42,
7,
42,
9,
1327506446
] |
def dispose(self):
        """Drop the frame and model references so they can be collected."""
        self.frame = None
        self.model = None
42,
7,
42,
9,
1327506446
] |
def removeEventListener(self, aListener):
        """No-op: this controller does not act on listener removal."""
42,
7,
42,
9,
1327506446
] |
def attachFrame(self, frame):
        """Remember the frame this controller is attached to."""
        self.frame = frame
42,
7,
42,
9,
1327506446
] |
def suspend(self, Suspend):
        """Always permit suspension, whatever state is requested."""
        return True
42,
7,
42,
9,
1327506446
] |
def getViewData(self):
        """Return the instance currently being inspected."""
        return self.ui.main.current.target
42,
7,
42,
9,
1327506446
] |
def restoreViewData(self, Data):
        """No-op: restoring previously saved view data is not supported."""
42,
7,
42,
9,
1327506446
] |
def getFrame(self):
        """Return the frame this controller is attached to."""
        return self.frame
42,
7,
42,
9,
1327506446
] |
def getStatusIndicator(self):
        """No status indicator is provided; returns None."""
42,
7,
42,
9,
1327506446
] |
def queryDispatch(self, url, name, flags):
        """No dispatch objects are provided by this controller; returns None."""
42,
7,
42,
9,
1327506446
] |
def getImplementationName(self):
        """Return this component's implementation-name constant."""
        return self.IMPLE_NAME
42,
7,
42,
9,
1327506446
] |
def supportsService(self, name):
        """Report whether ``name`` equals this component's implementation name."""
        return name == self.IMPLE_NAME
42,
7,
42,
9,
1327506446
] |
def __init__(self,
             stage_1_dir,
             configuration: Configuration = Configuration(),
             transformation_funcs=(),
             filter_funcs=(),
             user_defined_split_range=(),
             num_shards=1000,
             seed=0,
             dataset_size='small',
             deduplicate=False):
    """Initialize the code2seq dataset for the requested size.

    Raises:
      ValueError: if dataset_size is not one of 'small', 'med' or 'large'.
    """
    valid_dataset_size = {'small', 'med', 'large'}
    if dataset_size not in valid_dataset_size:
      raise ValueError('{} not in {}'.format(dataset_size,
                                             str(valid_dataset_size)))
    # Pick the download URLs matching the validated size.
    if dataset_size == 'large':
      urls = self._URLS_LARGE
    elif dataset_size == 'med':
      urls = self._URLS_MED
    else:
      urls = self._URLS_SMALL
    self.dataset_size = dataset_size
    super().__init__(self._DATASET_NAME, urls, self._GIT_URL,
                     self._DATASET_DESCRIPTION, stage_1_dir,
                     transformation_funcs=transformation_funcs,
                     filter_funcs=filter_funcs,
                     user_defined_split_range=user_defined_split_range,
                     num_shards=num_shards, seed=seed,
                     configuration=configuration, deduplicate=deduplicate)
78,
14,
78,
8,
1638474751
] |
def get_all_raw_data_paths(self):
    """Return the [train, validation, test] .c2s paths for the active size."""
    # dataset_size was validated at construction time, so any value other
    # than 'small' or 'med' means the large dataset.
    prefix = {'small': 'java-small', 'med': 'java-med'}.get(
        self.dataset_size, 'java-large')
    return [
        os.path.join(self.code2seq_extracted_dir,
                     '{}.{}.c2s'.format(prefix, split))
        for split in ('train', 'val', 'test')
    ]
78,
14,
78,
8,
1638474751
] |
def _construct_token_subtree(self, graph_to_output_example, token,
                             cur_node_id, token_root_name):
    """Add the subtree representing one AST-path end token to the graph.

    ``token`` is split on '|' into subtokens. Each subtoken becomes a
    'SUBTOKEN' node, consecutive subtokens are chained with 'NEXT_SUBTOKEN'
    edges, and a root node (typed and labelled ``token_root_name``) is
    connected to every subtoken node via a 'SUBTOKEN' edge.

    Args:
      graph_to_output_example: A GraphToOutputExample instance.
      token: Starting or ending token of an AST path.
      cur_node_id: Next available node id.
      token_root_name: Node type and label for the token root node.

    Returns:
      A tuple (graph_to_output_example, next_available_node_id,
      token_root_node_id).
    """
    subtoken_node_ids = []
    previous_id = None
    for subtoken in token.split('|'):
      graph_to_output_example.add_node(cur_node_id, 'SUBTOKEN', subtoken)
      if previous_id is not None:
        graph_to_output_example.add_edge(previous_id, cur_node_id,
                                         'NEXT_SUBTOKEN')
      subtoken_node_ids.append(cur_node_id)
      previous_id = cur_node_id
      cur_node_id += 1
    # Root node of the token subtree.
    graph_to_output_example.add_node(cur_node_id, token_root_name,
                                     token_root_name)
    token_node_id = cur_node_id
    cur_node_id += 1
    for subtoken_id in subtoken_node_ids:
      graph_to_output_example.add_edge(token_node_id, subtoken_id, 'SUBTOKEN')
    return graph_to_output_example, cur_node_id, token_node_id
78,
14,
78,
8,
1638474751
] |
def raw_data_to_graph_to_output_example(self, raw_data):
    # pylint: disable=line-too-long
    """Convert raw data to the unified GraphToOutputExample data structure.
    The Code2Seq raw data contains the target function name, and the sampled
    AST paths. Each AST path starts and ends with a token, and a series of
    AST nodes that connects the two tokens. We use _construct_token_subtree
    to build the token subtree and _construct_ast_nodes_subtree to build the
    AST nodes subtree. Then, all AST paths' nodes are connected to a AST root
    node.
    All AST root nodes are connected to a single code2seq root node.
    https://drive.google.com/file/d/1-cH0FzYIMikgTkUpzVkEZDGjoiqBB9C1/view?usp=sharing
    shows an example of such a graph and the original AST path.
    Args:
      raw_data: A dictionary with 'split', 'target_label' and 'ast_paths' as keys.
        The value of the 'split' field is the split (train/valid/test) that the
        data belongs to. The value of the 'target_label' field is the function
        name. The value of the 'ast_paths' field is a list of AST paths.
    Raises:
      GraphToOutputExampleNotValidError if the GraphToOutputExample is not
      valid.
    Returns:
      A dictionary with keys 'split' and 'GraphToOutputExample'. Values are the
      split(train/validation/test) the data belongs to, and the
      GraphToOutputExample instance.
    """
    # pylint: enable=line-too-long
    split = raw_data['split']
    target_label = raw_data['target_label']
    ast_paths = raw_data['ast_paths']
    graph_to_output_example = GraphToOutputExample()
    cur_node_id = 0
    ast_path_root_node_ids = []
    # This is the root node of all AST path nodes.
    graph_to_output_example.add_node(cur_node_id, 'C2C_ROOT', 'C2C_ROOT')
    c2c_root_node_id = cur_node_id
    cur_node_id += 1
    # Each ast_path is a (start_token, ast_nodes, end_token) triple.
    for ast_path in ast_paths:
      # The start_token subtree
      start_token = ast_path[0]
      graph_to_output_example, cur_node_id, start_token_node_id = (
          self._construct_token_subtree(
              graph_to_output_example, start_token, cur_node_id, 'START_TOKEN'))
      # The ast_nodes subtree
      ast_nodes = ast_path[1]
      graph_to_output_example, cur_node_id, ast_node_ids = (
          self._construct_ast_nodes_subtree(
              graph_to_output_example, ast_nodes, cur_node_id))
      # The end_token subtree
      end_token = ast_path[2]
      graph_to_output_example, cur_node_id, end_token_node_id = (
          self._construct_token_subtree(
              graph_to_output_example, end_token, cur_node_id, 'END_TOKEN'))
      # Connects the start_token root node with the first node in the
      # ast_nodes subtree.
      graph_to_output_example.add_edge(
          start_token_node_id, ast_node_ids[0], 'START_AST_PATH')
      # Connects the end_token root node with the last node in the
      # ast_nodes subtree.
      graph_to_output_example.add_edge(
          end_token_node_id, ast_node_ids[-1], 'END_AST_PATH')
      # Add a root AST path node representing the AST path.
      graph_to_output_example.add_node(
          cur_node_id, 'ROOT_AST_PATH', 'ROOT_AST_PATH')
      ast_path_root_node_id = cur_node_id
      ast_path_root_node_ids.append(ast_path_root_node_id)
      cur_node_id += 1
      # Connects the root AST path node with the start_token and end_token
      # subtree.
      graph_to_output_example.add_edge(
          ast_path_root_node_id, start_token_node_id, 'START_TOKEN')
      graph_to_output_example.add_edge(
          ast_path_root_node_id, end_token_node_id, 'END_TOKEN')
      # Connects the root AST path node with all nodes in the ast_nodes subtree.
      for node_id in ast_node_ids:
        graph_to_output_example.add_edge(ast_path_root_node_id, node_id,
                                         'AST_NODE')
    # Connects the code2seq root node with all AST path root node.
    for ast_path_root_node_id in ast_path_root_node_ids:
      graph_to_output_example.add_edge(c2c_root_node_id, ast_path_root_node_id,
                                       'AST_PATH')
    # The target function name is emitted subtoken-by-subtoken.
    for subtoken in target_label.split('|'):
      graph_to_output_example.add_token_output(subtoken)
    for transformation_fn in self.transformation_funcs:
      graph_to_output_example = transformation_fn(graph_to_output_example)
    if not graph_to_output_example.check_if_valid():
      raise GraphToOutputExampleNotValidError(
          'Invalid GraphToOutputExample found {}'.format(
              graph_to_output_example))
    # A filter rejecting the example nulls it out for downstream stages.
    for filter_fn in self.filter_funcs:
      if not filter_fn(graph_to_output_example):
        graph_to_output_example = None
        break
    return {'split': split, 'GraphToOutputExample': graph_to_output_example}
78,
14,
78,
8,
1638474751
] |
def __init__(self, random_split_fn, use_random_split):
    """Keep the provided random-split function and its enable flag."""
    self.random_split_fn = random_split_fn
    self.use_random_split = use_random_split
78,
14,
78,
8,
1638474751
] |
def _get_split(self, file_path):
    """Map a .c2s filename suffix to the dataset split it belongs to."""
    if file_path.endswith('train.c2s'):
      return constants.TRAIN_SPLIT_NAME
    if file_path.endswith('val.c2s'):
      return constants.VALIDATION_SPLIT_NAME
    # Anything else is treated as test data.
    return constants.TEST_SPLIT_NAME
78,
14,
78,
8,
1638474751
] |
def setup_uncased(model_config):
  """Set up the uncased BERT model, its tokenizer, device and embedding map.

  Args:
    model_config: The model configuration to be loaded.

  Returns:
    A (tokenizer, model, device, embedding_map) tuple: the tokenizer for
    converting between tokens and ids, the initialized model moved onto the
    selected device, the device itself, and the embedding map holding all
    token embeddings.
  """
  bert_tokenizer = tokenization.BertTokenizer.from_pretrained(model_config)
  bert_model = modeling.BertModel.from_pretrained(model_config)
  # Inference mode only.
  _ = bert_model.eval()
  # Prefer the first GPU when available.
  run_device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
  print('device : ', run_device)
  bert_model = bert_model.to(run_device)
  emb_map = embeddings_helper.EmbeddingMap(run_device, bert_model)
  return bert_tokenizer, bert_model, run_device, emb_map
82,
17,
82,
1,
1559850451
] |
def __init__(self):  # pylint: disable=super-init-not-called
        """Intentionally skip the superclass initializer (see pylint pragma)."""
4927,
2897,
4927,
1181,
1439511374
] |
def setUp(self):
        """Create the MockInvalidSuggestion instance the tests exercise."""
        super(BaseSuggestionUnitTests, self).setUp()
        self.base_suggestion = MockInvalidSuggestion()
4927,
2897,
4927,
1181,
1439511374
] |
def test_base_class_get_change_list_for_accepting_suggestion_raises_error(
        self):
        """The abstract base must raise NotImplementedError for this method."""
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseSuggestion should implement '
            'get_change_list_for_accepting_suggestion.'):
            self.base_suggestion.get_change_list_for_accepting_suggestion()
4927,
2897,
4927,
1181,
1439511374
] |
def test_base_class_populate_old_value_of_change_raises_error(self):
        """The abstract base must raise NotImplementedError for this method."""
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseSuggestion should implement'
            ' populate_old_value_of_change.'):
            self.base_suggestion.populate_old_value_of_change()
4927,
2897,
4927,
1181,
1439511374
] |
def test_base_class_get_all_html_content_strings(self):
        """The abstract base must raise NotImplementedError for this method."""
        with self.assertRaisesRegex(
            NotImplementedError,
            'Subclasses of BaseSuggestion should implement'
            ' get_all_html_content_strings.'):
            self.base_suggestion.get_all_html_content_strings()
4927,
2897,
4927,
1181,
1439511374
] |