function (string, 11 to 56k chars) | repo_name (string, 5 to 60 chars) | features (sequence) |
---|---|---|
def test_add_exists(self, isfile_mock, deploy_mock, validate_mock):
isfile_mock.return_value = True
catalog.add('tpch')
filenames = ['tpch.properties']
deploy_mock.assert_called_with(filenames,
get_catalog_directory(),
constants.REMOTE_CATALOG_DIR,
PRESTO_STANDALONE_USER_GROUP)
validate_mock.assert_called_with(filenames) | prestodb/presto-admin | [
170,
102,
170,
63,
1432266042
] |
def test_add_all(self, mock_validate, listdir_mock, isdir_mock,
deploy_mock):
catalogs = ['tpch.properties', 'another.properties']
listdir_mock.return_value = catalogs
catalog.add()
deploy_mock.assert_called_with(catalogs,
get_catalog_directory(),
constants.REMOTE_CATALOG_DIR,
PRESTO_STANDALONE_USER_GROUP) | prestodb/presto-admin | [
170,
102,
170,
63,
1432266042
] |
def test_add_all_fails_if_dir_not_there(self, isdir_mock, deploy_mock):
isdir_mock.return_value = False
self.assertRaisesRegexp(ConfigFileNotFoundError,
r'Cannot add catalogs because directory .+'
r' does not exist',
catalog.add)
self.assertFalse(deploy_mock.called) | prestodb/presto-admin | [
170,
102,
170,
63,
1432266042
] |
def test_remove(self, local_rm_mock, exists_mock, sudo_mock):
script = ('if [ -f /etc/presto/catalog/tpch.properties ] ; '
'then rm /etc/presto/catalog/tpch.properties ; '
'else echo "Could not remove catalog \'tpch\'. '
'No such file \'/etc/presto/catalog/tpch.properties\'"; fi')
exists_mock.return_value = True
fabric.api.env.host = 'localhost'
catalog.remove('tpch')
sudo_mock.assert_called_with(script)
local_rm_mock.assert_called_with(get_catalog_directory() +
'/tpch.properties') | prestodb/presto-admin | [
170,
102,
170,
63,
1432266042
] |
def test_remove_failure(self, exists_mock, sudo_mock):
exists_mock.return_value = False
fabric.api.env.host = 'localhost'
out = _AttributeString()
out.succeeded = False
sudo_mock.return_value = out
self.assertRaisesRegexp(SystemExit,
'\\[localhost\\] Failed to remove catalog tpch.',
catalog.remove,
'tpch') | prestodb/presto-admin | [
170,
102,
170,
63,
1432266042
] |
def test_remove_no_such_file(self, exists_mock, sudo_mock):
exists_mock.return_value = False
fabric.api.env.host = 'localhost'
error_msg = ('Could not remove catalog tpch: No such file ' +
os.path.join(get_catalog_directory(), 'tpch.properties'))
out = _AttributeString(error_msg)
out.succeeded = True
sudo_mock.return_value = out
self.assertRaisesRegexp(SystemExit,
'\\[localhost\\] %s' % error_msg,
catalog.remove,
'tpch') | prestodb/presto-admin | [
170,
102,
170,
63,
1432266042
] |
def test_warning_if_connector_dir_empty(self, isdir_mock, listdir_mock):
isdir_mock.return_value = True
listdir_mock.return_value = []
catalog.add()
self.assertEqual('\nWarning: Directory %s is empty. No catalogs will'
' be deployed\n\n' % get_catalog_directory(),
self.test_stderr.getvalue()) | prestodb/presto-admin | [
170,
102,
170,
63,
1432266042
] |
def test_add_permission_denied(self, isdir_mock, listdir_mock):
isdir_mock.return_value = True
error_msg = ('Permission denied')
listdir_mock.side_effect = OSError(13, error_msg)
fabric.api.env.host = 'localhost'
self.assertRaisesRegexp(SystemExit, '\\[localhost\\] %s' % error_msg,
catalog.add) | prestodb/presto-admin | [
170,
102,
170,
63,
1432266042
] |
def test_remove_os_error(self, remove_file_mock, remove_mock):
fabric.api.env.host = 'localhost'
error = OSError(13, 'Permission denied')
remove_mock.side_effect = error
self.assertRaisesRegexp(OSError, 'Permission denied',
catalog.remove, 'tpch') | prestodb/presto-admin | [
170,
102,
170,
63,
1432266042
] |
def test_deploy_files(self, put_mock, create_dir_mock):
local_dir = '/my/local/dir'
remote_dir = '/my/remote/dir'
catalog.deploy_files(['a', 'b'], local_dir, remote_dir,
PRESTO_STANDALONE_USER_GROUP)
create_dir_mock.assert_called_with(remote_dir, PRESTO_STANDALONE_USER_GROUP)
put_mock.assert_any_call('/my/local/dir/a', remote_dir, use_sudo=True,
mode=0600)
put_mock.assert_any_call('/my/local/dir/b', remote_dir, use_sudo=True,
mode=0600) | prestodb/presto-admin | [
170,
102,
170,
63,
1432266042
] |
def test_validate(self, open_mock, is_file_mock):
is_file_mock.return_value = True
file_obj = open_mock.return_value.__enter__.return_value
file_obj.read.return_value = 'connector.noname=example'
self.assertRaisesRegexp(ConfigurationError,
'Catalog configuration example.properties '
'does not contain connector.name',
catalog.add, 'example') | prestodb/presto-admin | [
170,
102,
170,
63,
1432266042
] |
def test_validate_fail(self, is_file_mock):
is_file_mock.return_value = True
self.assertRaisesRegexp(
SystemExit,
'Error validating ' + os.path.join(get_catalog_directory(), 'example.properties') + '\n\n'
'Underlying exception:\n No such file or directory',
catalog.add, 'example') | prestodb/presto-admin | [
170,
102,
170,
63,
1432266042
] |
def __init__(self, region, name, retention_in_days=7):
super(LogGroup, self).__init__()
self.region = region
self.name = name
self.retention_in_days = retention_in_days | GoogleCloudPlatform/PerfKitBenchmarker | [
1785,
474,
1785,
248,
1405617806
] |
def _Delete(self):
"""Delete the log group."""
delete_cmd = util.AWS_PREFIX + [
'--region', self.region,
'logs', 'delete-log-group',
'--log-group-name', self.name
]
vm_util.IssueCommand(delete_cmd, raise_on_failure=False) | GoogleCloudPlatform/PerfKitBenchmarker | [
1785,
474,
1785,
248,
1405617806
] |
def _PostCreate(self):
"""Set the retention policy."""
put_cmd = util.AWS_PREFIX + [
'--region', self.region,
'logs', 'put-retention-policy',
'--log-group-name', self.name,
'--retention-in-days', str(self.retention_in_days)
]
vm_util.IssueCommand(put_cmd) | GoogleCloudPlatform/PerfKitBenchmarker | [
1785,
474,
1785,
248,
1405617806
] |
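A hedged sketch of the command _PostCreate assembles above, assuming util.AWS_PREFIX begins with ['aws'] and using made-up values for the region and log group name:

# Illustrative only: util.AWS_PREFIX is assumed to start with ['aws'].
prefix = ['aws']
put_cmd = prefix + [
    '--region', 'us-east-1',                   # hypothetical region
    'logs', 'put-retention-policy',
    '--log-group-name', 'pkb-test-log-group',  # hypothetical log group name
    '--retention-in-days', '7',
]
print(' '.join(put_cmd))
# aws --region us-east-1 logs put-retention-policy --log-group-name pkb-test-log-group --retention-in-days 7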
def JOB_STATES(state):
if state == 'failed':
return BOLD() + RED() + state + ENDC()
elif state == 'done':
return BOLD() + GREEN() + state + ENDC()
elif state in ['running', 'in_progress']:
return GREEN() + state + ENDC()
elif state == 'partially_failed':
return RED() + state + ENDC()
else:
return YELLOW() + state + ENDC() | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def get_size_str(size):
"""
Formats a byte size as a string.
The returned string is no more than 9 characters long.
"""
if size is None:
return "0 " + SIZE_LEVEL[0]
if size == 0:
magnitude = 0
level = 0
else:
magnitude = math.floor(math.log(size, 10))
level = int(min(math.floor(magnitude // 3), 4))
return ('%d' if level == 0 else '%.2f') % (float(size) / 2**(level*10)) + ' ' + SIZE_LEVEL[level] | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
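A quick sanity check of get_size_str above, assuming SIZE_LEVEL is the usual suffix table (the real lookup is defined elsewhere in the module):

import math  # get_size_str relies on math being imported

SIZE_LEVEL = ['bytes', 'KB', 'MB', 'GB', 'TB']  # assumed lookup table

print(get_size_str(None))    # '0 bytes'
print(get_size_str(512))     # '512 bytes'
print(get_size_str(1536))    # '1.50 KB'  (1536 / 2**10)
print(get_size_str(10**9))   # '0.93 GB'  (1e9 / 2**30)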
def get_io_desc(parameter, include_class=True, show_opt=True, app_help_version=False):
# For interactive help, format array:CLASS inputs as:
# -iNAME=CLASS [-iNAME=... [...]] # If input is required (needs >=1 inputs)
# [-iNAME=CLASS [...]] # If input is optional (needs >=0 inputs)
if app_help_version and parameter["class"].startswith("array"):
scalar_parameter = parameter.copy()
# Munge the parameter dict (strip off "array:" to turn it into a
# scalar) and recurse
scalar_parameter["class"] = scalar_parameter["class"][6:]
if "default" in parameter or parameter.get("optional"):
return "[" + get_io_desc(scalar_parameter, include_class=include_class, show_opt=False, app_help_version=app_help_version) + " [-i%s=... [...]]]" % (parameter["name"],)
else:
return get_io_desc(scalar_parameter, include_class=include_class, show_opt=False, app_help_version=app_help_version) + " [-i%s=... [...]]" % (parameter["name"],)
desc = ""
is_optional = False
if show_opt:
if "default" in parameter or parameter.get("optional"):
is_optional = True
desc += "["
desc += ('-i' if app_help_version else '') + parameter["name"]
include_parens = include_class or 'type' in parameter or 'default' in parameter
if include_parens:
desc += ("=" if app_help_version else " ") + "("
is_first = True
if include_class:
desc += parameter["class"]
is_first = False
if "type" in parameter:
if not is_first:
desc += ", "
else:
is_first = False
desc += "type " + parse_typespec(parameter["type"])
if "default" in parameter:
if not is_first:
desc += ', '
desc += 'default=' + json.dumps(parameter['default'])
if include_parens:
desc += ")"
if show_opt and is_optional:
desc += "]"
return desc | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
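A hedged illustration of how get_io_desc above renders an optional array input for app help; the parameter dict is made up for this example:

param = {"name": "reads", "class": "array:file", "optional": True}  # hypothetical input spec entry
print(get_io_desc(param, app_help_version=True))
# [-ireads=(file) [-ireads=... [...]]]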
def is_job_ref(thing, reftype=dict):
'''
:param thing: something that might be a job-based object reference hash
:param reftype: type that a job-based object reference would be (default is dict)
'''
return isinstance(thing, reftype) and \
((len(thing) == 2 and \
isinstance(thing.get('field'), basestring) and \
isinstance(thing.get('job'), basestring)) or \
(len(thing) == 1 and \
isinstance(thing.get('$dnanexus_link'), reftype) and \
isinstance(thing['$dnanexus_link'].get('field'), basestring) and \
isinstance(thing['$dnanexus_link'].get('job'), basestring))) | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
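Illustrative inputs for is_job_ref above (the IDs are placeholders; basestring here implies Python 2):

print(is_job_ref({"job": "job-xxxx", "field": "out"}))                      # True
print(is_job_ref({"$dnanexus_link": {"job": "job-xxxx", "field": "out"}}))  # True
print(is_job_ref({"$dnanexus_link": "file-xxxx"}))                          # False: plain data object link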
def get_field_from_jbor(thing):
'''
:returns: Output field name from a JBOR
Assumes :func:`is_job_ref` evaluates to True
'''
if '$dnanexus_link' in thing:
return thing['$dnanexus_link']['field']
else:
return thing['field'] | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def is_metadata_ref(thing, reftype=dict):
return isinstance(thing, reftype) and \
len(thing) == 1 and \
isinstance(thing.get('$dnanexus_link'), reftype) and \
isinstance(thing['$dnanexus_link'].get('metadata'), basestring) | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def io_val_to_str(val):
if is_job_ref(val):
# Job-based object references
return jbor_to_str(val)
elif isinstance(val, dict) and '$dnanexus_link' in val:
# DNAnexus link
if isinstance(val['$dnanexus_link'], basestring):
# simple link
return val['$dnanexus_link']
elif 'project' in val['$dnanexus_link'] and 'id' in val['$dnanexus_link']:
return val['$dnanexus_link']['project'] + ':' + val['$dnanexus_link']['id']
else:
return json.dumps(val)
elif isinstance(val, list):
if len(val) == 0:
return '[]'
else:
return '[ ' + ', '.join([io_val_to_str(item) for item in val]) + ' ]'
elif isinstance(val, dict):
return '{ ' + ', '.join([key + ': ' + io_val_to_str(value) for key, value in val.items()]) + ' }'
else:
return json.dumps(val) | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
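A few hedged examples of the string forms io_val_to_str above produces (all values are illustrative):

print(io_val_to_str({"$dnanexus_link": "file-xxxx"}))                                     # file-xxxx
print(io_val_to_str({"$dnanexus_link": {"project": "project-aaaa", "id": "file-xxxx"}}))  # project-aaaa:file-xxxx
print(io_val_to_str([1, "two"]))                                                          # [ 1, "two" ]
print(io_val_to_str({"n": 3}))                                                            # { n: 3 }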
def get_io_field(io_hash, defaults=None, delim='=', highlight_fields=()):
def highlight_value(key, value):
if key in highlight_fields:
return YELLOW() + value + ENDC()
else:
return value
if defaults is None:
defaults = {}
if io_hash is None:
return '-'
if len(io_hash) == 0 and len(defaults) == 0:
return '-'
if get_delimiter() is not None:
return ('\n' + get_delimiter()).join([(key + delim + highlight_value(key, io_val_to_str(value))) for key, value in io_hash.items()] +
[('[' + key + delim + io_val_to_str(value) + ']') for key, value in defaults.items()])
else:
lines = [fill(key + ' ' + delim + ' ' + highlight_value(key, io_val_to_str(value)),
initial_indent=' ' * FIELD_NAME_WIDTH,
subsequent_indent=' ' * (FIELD_NAME_WIDTH + 1),
break_long_words=False)
for key, value in io_hash.items()]
lines.extend([fill('[' + key + ' ' + delim + ' ' + io_val_to_str(value) + ']',
initial_indent=' ' * FIELD_NAME_WIDTH,
subsequent_indent=' ' * (FIELD_NAME_WIDTH + 1),
break_long_words=False)
for key, value in defaults.items()])
return '\n'.join(lines)[FIELD_NAME_WIDTH:] | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def render_bundleddepends(thing):
from ..bindings.search import find_one_data_object
from ..exceptions import DXError
bundles = []
for item in thing:
bundle_asset_record = dxpy.DXFile(item["id"]["$dnanexus_link"]).get_properties().get("AssetBundle")
asset = None
if bundle_asset_record:
asset = dxpy.DXRecord(bundle_asset_record)
if asset:
try:
bundles.append(asset.describe().get("name") + " (" + asset.get_id() + ")")
except DXError:
asset = None
if not asset:
bundles.append(item["name"] + " (" + item["id"]["$dnanexus_link"] + ")")
return bundles | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def render_stage(title, stage, as_stage_of=None):
lines_to_print = []
if stage['name'] is not None:
lines_to_print.append((title, "{name} ({id})".format(name=stage['name'], id=stage['id'])))
else:
lines_to_print.append((title, stage['id']))
lines_to_print.append((' Executable', stage['executable'] + \
(" (" + RED() + "inaccessible" + ENDC() + ")" \
if stage.get('accessible') is False else "")))
if 'execution' in stage:
is_cached_result = as_stage_of is not None and 'parentAnalysis' in stage['execution'] and \
stage['execution']['parentAnalysis'] != as_stage_of
execution_id_str = stage['execution']['id']
if is_cached_result:
execution_id_str = "[" + execution_id_str + "]"
if 'state' in stage['execution']:
lines_to_print.append((' Execution', execution_id_str + ' (' + JOB_STATES(stage['execution']['state']) + ')'))
else:
lines_to_print.append((' Execution', execution_id_str))
if is_cached_result:
lines_to_print.append((' Cached from', stage['execution']['parentAnalysis']))
for line in lines_to_print:
print_field(line[0], line[1]) | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def render_timestamp(timestamp):
return datetime.datetime.fromtimestamp(timestamp//1000).ctime() | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def print_field(label, value):
if get_delimiter() is not None:
sys.stdout.write(label + get_delimiter() + value + '\n')
else:
sys.stdout.write(
label + " " * (FIELD_NAME_WIDTH-len(label)) + fill(value,
subsequent_indent=' '*FIELD_NAME_WIDTH,
width_adjustment=-FIELD_NAME_WIDTH) +
'\n') | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def print_list_field(label, values):
print_field(label, ('-' if len(values) == 0 else DELIMITER(', ').join(values))) | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def print_project_desc(desc, verbose=False):
recognized_fields = [
'id', 'class', 'name', 'summary', 'description', 'protected', 'restricted', 'created', 'modified',
'dataUsage', 'sponsoredDataUsage', 'tags', 'level', 'folders', 'objects', 'permissions', 'properties',
'appCaches', 'billTo', 'version', 'createdBy', 'totalSponsoredEgressBytes', 'consumedSponsoredEgressBytes',
'containsPHI', 'databaseUIViewOnly', 'region', 'storageCost', 'pendingTransfer','atSpendingLimit',
# Following are app container-specific
'destroyAt', 'project', 'type', 'app', 'appName'
]
# Basic metadata
print_field("ID", desc["id"])
print_field("Class", desc["class"])
if "name" in desc:
print_field("Name", desc["name"])
if 'summary' in desc:
print_field("Summary", desc["summary"])
if 'description' in desc and (verbose or 'summary' not in desc):
print_field("Description", desc['description'])
if 'version' in desc and verbose:
print_field("Version", str(desc['version']))
# Ownership and permissions
if 'billTo' in desc:
print_field("Billed to", desc['billTo'][5 if desc['billTo'].startswith('user-') else 0:])
if 'pendingTransfer' in desc and (verbose or desc['pendingTransfer'] is not None):
print_json_field('Pending transfer to', desc['pendingTransfer'])
if "level" in desc:
print_field("Access level", desc["level"])
if 'region' in desc:
print_field('Region', desc['region'])
# Project settings
if 'protected' in desc:
print_json_field("Protected", desc["protected"])
if 'restricted' in desc:
print_json_field("Restricted", desc["restricted"])
if 'containsPHI' in desc:
print_json_field('Contains PHI', desc['containsPHI'])
if 'databaseUIViewOnly' in desc and desc['databaseUIViewOnly']:
print_json_field('Database UI View Only', desc['databaseUIViewOnly'])
# Usage
print_field("Created", render_timestamp(desc['created']))
if 'createdBy' in desc:
print_field("Created by", desc['createdBy']['user'][desc['createdBy']['user'].find('-') + 1:])
print_field("Last modified", render_timestamp(desc['modified']))
print_field("Data usage", ('%.2f' % desc["dataUsage"]) + ' GB')
if 'sponsoredDataUsage' in desc:
print_field("Sponsored data", ('%.2f' % desc["sponsoredDataUsage"]) + ' GB')
if 'storageCost' in desc:
print_field("Storage cost", "$%.3f/month" % desc["storageCost"])
if 'totalSponsoredEgressBytes' in desc or 'consumedSponsoredEgressBytes' in desc:
total_egress_str = '%.2f GB' % (desc['totalSponsoredEgressBytes'] / 1073741824.,) \
if 'totalSponsoredEgressBytes' in desc else '??'
consumed_egress_str = '%.2f GB' % (desc['consumedSponsoredEgressBytes'] / 1073741824.,) \
if 'consumedSponsoredEgressBytes' in desc else '??'
print_field('Sponsored egress',
('%s used of %s total' % (consumed_egress_str, total_egress_str)))
if 'atSpendingLimit' in desc:
print_json_field("At spending limit?", desc['atSpendingLimit'])
# Misc metadata
if "objects" in desc:
print_field("# Files", str(desc["objects"]))
if "folders" in desc:
print_list_field("Folders", desc["folders"])
if "permissions" in desc:
print_list_field(
"Permissions",
[key[5 if key.startswith('user-') else 0:] + ':' + value for key, value in desc["permissions"].items()]
)
if 'tags' in desc:
print_list_field("Tags", desc["tags"])
if "properties" in desc:
print_list_field("Properties", [key + '=' + value for key, value in desc["properties"].items()])
if "appCaches" in desc:
print_json_field("App caches", desc["appCaches"])
# Container-specific
if 'type' in desc:
print_field("Container type", desc["type"])
if 'project' in desc:
print_field("Associated project", desc["project"])
if 'destroyAt' in desc:
print_field("To be destroyed", render_timestamp(desc['modified']))
if 'app' in desc:
print_field("Associated App ID", desc["app"])
if 'appName' in desc:
print_field("Associated App", desc["appName"])
for field in desc:
if field not in recognized_fields:
print_json_field(field, desc[field]) | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def print_app_desc(desc, verbose=False):
recognized_fields = ['id', 'class', 'name', 'version', 'aliases', 'createdBy', 'created', 'modified', 'deleted', 'published', 'title', 'subtitle', 'description', 'categories', 'access', 'dxapi', 'inputSpec', 'outputSpec', 'runSpec', 'resources', 'billTo', 'installed', 'openSource', 'summary', 'applet', 'installs', 'billing', 'details', 'developerNotes',
'authorizedUsers']
print_field("ID", desc["id"])
print_field("Class", desc["class"])
if 'billTo' in desc:
print_field("Billed to", desc['billTo'][5 if desc['billTo'].startswith('user-') else 0:])
print_field("Name", desc["name"])
print_field("Version", desc["version"])
print_list_field("Aliases", desc["aliases"])
print_field("Created by", desc["createdBy"][5 if desc['createdBy'].startswith('user-') else 0:])
print_field("Created", render_timestamp(desc['created']))
print_field("Last modified", render_timestamp(desc['modified']))
print_field("Created from", desc["applet"])
print_json_field('Installed', desc['installed'])
print_json_field('Open source', desc['openSource'])
print_json_field('Deleted', desc['deleted'])
if not desc['deleted']:
advanced_inputs = []
details = desc["details"]
if isinstance(details, dict) and "advancedInputs" in details:
if not verbose:
advanced_inputs = details["advancedInputs"]
del details["advancedInputs"]
if 'published' not in desc or desc["published"] < 0:
print_field("Published", "-")
else:
print_field("Published", render_timestamp(desc['published']))
if "title" in desc and desc['title'] is not None:
print_field("Title", desc["title"])
if "subtitle" in desc and desc['subtitle'] is not None:
print_field("Subtitle", desc["subtitle"])
if 'summary' in desc and desc['summary'] is not None:
print_field("Summary", desc['summary'])
print_list_field("Categories", desc["categories"])
if 'details' in desc:
print_json_field("Details", desc["details"])
print_json_field("Access", desc["access"])
print_field("API version", desc["dxapi"])
if 'inputSpec' in desc:
print_nofill_field("Input Spec", get_io_spec(desc["inputSpec"], skip_fields=advanced_inputs))
print_nofill_field("Output Spec", get_io_spec(desc["outputSpec"]))
print_field("Interpreter", desc["runSpec"]["interpreter"])
if "resources" in desc["runSpec"]:
print_json_field("Resources", desc["runSpec"]["resources"])
if "bundledDepends" in desc["runSpec"]:
print_list_field("bundledDepends", render_bundleddepends(desc["runSpec"]["bundledDepends"]))
if "execDepends" in desc["runSpec"]:
print_list_field("execDepends", render_execdepends(desc["runSpec"]["execDepends"]))
if "systemRequirements" in desc['runSpec']:
print_json_field('Sys Requirements', desc['runSpec']['systemRequirements'])
if 'resources' in desc:
print_field("Resources", desc['resources'])
if 'installs' in desc:
print_field('# Installs', str(desc['installs']))
if 'authorizedUsers' in desc:
print_list_field('AuthorizedUsers', desc["authorizedUsers"])
for field in desc:
if field not in recognized_fields:
print_json_field(field, desc[field]) | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def get_col_str(col_desc):
return col_desc['name'] + DELIMITER(" (") + col_desc['type'] + DELIMITER(")") | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def printable_ssh_host_key(ssh_host_key):
try:
keygen = subprocess.Popen(["ssh-keygen", "-lf", "/dev/stdin"], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
if USING_PYTHON2:
(stdout, stderr) = keygen.communicate(ssh_host_key)
else:
(stdout, stderr) = keygen.communicate(ssh_host_key.encode())
except:
return ssh_host_key.strip()
else:
if not USING_PYTHON2:
stdout = stdout.decode()
return stdout.replace(" no comment", "").strip() | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def locale_from_currency_code(dx_code):
"""
This is a (temporary) hardcoded mapping between currency_list.json in nucleus and a standard
locale string useful for further formatting.
:param dx_code: An id of the nucleus/commons/pricing_models/currency_list.json collection
:return: standardised locale, e.g. 'en_US'; None when no mapping is found
"""
currency_locale_map = {0: 'en_US', 1: 'en_GB'}
return currency_locale_map[dx_code] if dx_code in currency_locale_map else None | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
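Usage of locale_from_currency_code above is straightforward:

print(locale_from_currency_code(0))   # en_US
print(locale_from_currency_code(1))   # en_GB
print(locale_from_currency_code(99))  # None (unknown dxCode)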
def format_currency(value, meta, currency_locale=None):
"""
Formats a currency value into a properly decorated currency string based on either the locale (preferred)
or, if that is not available, the currency metadata. Until a locale is provided by the server,
a crude mapping between `currency.dxCode` and a locale string is used instead (e.g. 0: 'en_US').
:param value: amount
:param meta: server metadata (`currency`)
:return: formatted currency string
"""
try:
if currency_locale is None:
currency_locale = locale_from_currency_code(meta['dxCode'])
if currency_locale is None:
return format_currency_from_meta(value, meta)
else:
locale.setlocale(locale.LC_ALL, currency_locale)
return locale.currency(value, grouping=True)
except locale.Error:
# .. locale is probably not available -> fallback to format manually
return format_currency_from_meta(value, meta) | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
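A hedged call sketch for format_currency above; the meta dict shape is assumed, and the exact output depends on which locales are installed (format_currency_from_meta, referenced in the code, is not shown in this excerpt):

meta = {"dxCode": 0, "symbol": "$"}   # assumed metadata fields
print(format_currency(12345.6, meta, currency_locale="en_US"))
# e.g. '$12,345.60' when the en_US locale is available; otherwise the
# format_currency_from_meta fallback (not shown here) is used instead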
def print_generic_desc(desc):
for field in desc:
print_json_field(field, desc[field]) | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def get_ls_desc(desc, print_id=False):
addendum = ' : ' + desc['id'] if print_id is True else ''
if desc['class'] in ['applet', 'workflow']:
return BOLD() + GREEN() + desc['name'] + ENDC() + addendum
else:
return desc['name'] + addendum | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def get_ls_l_header():
return (BOLD() +
'State' + DELIMITER(' ') +
'Last modified' + DELIMITER(' ') +
'Size' + DELIMITER(' ') +
'Name' + DELIMITER(' (') +
'ID' + DELIMITER(')') +
ENDC()) | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def get_ls_l_desc_fields():
return {
'id': True,
'class': True,
'folder': True,
'length': True,
'modified': True,
'name': True,
'project': True,
'size': True,
'state': True
} | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def print_ls_l_desc(desc, **kwargs):
print(get_ls_l_desc(desc, **kwargs)) | dnanexus/dx-toolkit | [
74,
76,
74,
71,
1340756627
] |
def each(f):
if f.body:
f.hashes = []
for hash_type, h in HashFile.extract_hashes(f.body.contents):
hash_object = Hash.get_or_create(value=h.hexdigest())
hash_object.add_source("analytics")
hash_object.save()
f.active_link_to(
hash_object,
"{} hash".format(hash_type.upper()),
"HashFile",
clean_old=False,
)
f.hashes.append({"hash": hash_type, "value": h.hexdigest()})
f.save() | yeti-platform/yeti | [
1360,
268,
1360,
132,
1450025666
] |
def setUp(self):
self.fd, self.path = tempfile.mkstemp() | rbuffat/pyidf | [
20,
7,
20,
2,
1417292720
] |
def __init__(
self, client, name, offset, read_rows_kwargs, retry_delay_callback=None | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def __iter__(self):
"""An iterable of messages.
Returns:
Iterable[ \
~google.cloud.bigquery_storage_v1.types.ReadRowsResponse \
]:
A sequence of row messages.
"""
# Infinite loop to reconnect on reconnectable errors while processing
# the row stream.
if self._wrapped is None:
self._reconnect()
while True:
try:
for message in self._wrapped:
rowcount = message.row_count
self._offset += rowcount
yield message
return # Made it through the whole stream.
except google.api_core.exceptions.InternalServerError as exc:
resumable_error = any(
resumable_message in exc.message
for resumable_message in _STREAM_RESUMPTION_INTERNAL_ERROR_MESSAGES
)
if not resumable_error:
raise
except _STREAM_RESUMPTION_EXCEPTIONS:
# Transient error, so reconnect to the stream.
pass
except Exception as exc:
if not self._resource_exhausted_exception_is_retryable(exc):
raise
self._reconnect() | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
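A hedged consumption sketch for the iterator above; reader is assumed to be an instance of this ReadRowsStream wrapper, for example as returned by the client's read_rows helper:

total_rows = 0
for response in reader:            # __iter__ reconnects transparently on resumable errors
    total_rows += response.row_count
print("rows streamed:", total_rows)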
def _resource_exhausted_exception_is_retryable(self, exc):
if isinstance(exc, google.api_core.exceptions.ResourceExhausted):
# ResourceExhausted errors are only retried if a valid
# RetryInfo is provided with the error.
#
# TODO: Remove hasattr logic when we require google-api-core >= 2.2.0.
# ResourceExhausted added details/_details in google-api-core 2.2.0.
details = None
if hasattr(exc, "details"):
details = exc.details
elif hasattr(exc, "_details"):
details = exc._details
if details is not None:
for detail in details:
if isinstance(detail, google.rpc.error_details_pb2.RetryInfo):
retry_delay = detail.retry_delay
if retry_delay is not None:
delay = max(
0,
float(retry_delay.seconds)
+ (float(retry_delay.nanos) / 1e9),
)
if self._retry_delay_callback:
self._retry_delay_callback(delay)
time.sleep(delay)
return True
return False | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def to_arrow(self, read_session=None):
"""Create a :class:`pyarrow.Table` of all rows in the stream.
This method requires the pyarrow library and a stream using the Arrow
format.
Args:
read_session ( \
~google.cloud.bigquery_storage_v1.types.ReadSession \
):
DEPRECATED.
This argument was used to specify the schema of the rows in the
stream, but now the first message in a read stream contains
this information.
Returns:
pyarrow.Table:
A table of all rows in the stream.
"""
return self.rows(read_session=read_session).to_arrow() | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def __init__(self, reader, read_session=None):
self._reader = reader
if read_session is not None:
self._stream_parser = _StreamParser.from_read_session(read_session)
else:
self._stream_parser = None | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def pages(self):
"""A generator of all pages in the stream.
Returns:
types.GeneratorType[google.cloud.bigquery_storage_v1.ReadRowsPage]:
A generator of pages.
"""
# Each page is an iterator of rows. But also has num_items, remaining,
# and to_dataframe.
for message in self._reader:
# Only the first message contains the schema, which is needed to
# decode the messages.
if not self._stream_parser:
self._stream_parser = _StreamParser.from_read_rows_response(message)
yield ReadRowsPage(self._stream_parser, message) | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
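A hedged sketch of walking the pages generator above; rows is assumed to be the ReadRowsIterable wrapping a reader, and each page exposes num_items and to_arrow() as shown:

for page in rows.pages:
    batch = page.to_arrow()                  # pyarrow.RecordBatch for this message
    print(page.num_items, batch.num_rows)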
def to_arrow(self):
"""Create a :class:`pyarrow.Table` of all rows in the stream.
This method requires the pyarrow library and a stream using the Arrow
format.
Returns:
pyarrow.Table:
A table of all rows in the stream.
"""
record_batches = []
for page in self.pages:
record_batches.append(page.to_arrow())
if record_batches:
return pyarrow.Table.from_batches(record_batches)
# No data, return an empty Table.
self._stream_parser._parse_arrow_schema()
return pyarrow.Table.from_batches([], schema=self._stream_parser._schema) | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def _dtypes_from_avro(self, avro_fields):
"""Determine Pandas dtypes for columns in Avro schema.
Args:
avro_fields (Iterable[Mapping[str, Any]]):
Avro fields' metadata.
Returns:
collections.OrderedDict[str, str]:
Column names with their corresponding Pandas dtypes.
"""
result = collections.OrderedDict()
type_map = {"long": "int64", "double": "float64", "boolean": "bool"}
for field_info in avro_fields:
# If a type is an union of multiple types, pick the first type
# that is not "null".
if isinstance(field_info["type"], list):
type_info = next(item for item in field_info["type"] if item != "null")
if isinstance(type_info, str):
field_dtype = type_map.get(type_info, "object")
else:
logical_type = type_info.get("logicalType")
if logical_type == "timestamp-micros":
field_dtype = "datetime64[ns, UTC]"
else:
field_dtype = "object"
result[field_info["name"]] = field_dtype
return result | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
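An illustrative input/output pair for _dtypes_from_avro above, using made-up Avro field metadata (nullable columns are unions with "null"):

avro_fields = [
    {"name": "id",   "type": ["null", "long"]},
    {"name": "ts",   "type": ["null", {"type": "long", "logicalType": "timestamp-micros"}]},
    {"name": "name", "type": ["null", "string"]},
]
# Expected result:
# OrderedDict([('id', 'int64'), ('ts', 'datetime64[ns, UTC]'), ('name', 'object')])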
def __init__(self, stream_parser, message):
self._stream_parser = stream_parser
self._message = message
self._iter_rows = None
self._num_items = self._message.row_count
self._remaining = self._message.row_count | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def num_items(self):
"""int: Total items in the page."""
return self._num_items | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def remaining(self):
"""int: Remaining items in the page."""
return self._remaining | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def next(self):
"""Get the next row in the page."""
self._parse_rows()
if self._remaining > 0:
self._remaining -= 1
return next(self._iter_rows) | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def to_arrow(self):
"""Create an :class:`pyarrow.RecordBatch` of rows in the page.
Returns:
pyarrow.RecordBatch:
Rows from the message, as an Arrow record batch.
"""
return self._stream_parser.to_arrow(self._message) | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def to_arrow(self, message):
raise NotImplementedError("Not implemented.") | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def to_rows(self, message):
raise NotImplementedError("Not implemented.") | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def _parse_arrow_schema(self):
raise NotImplementedError("Not implemented.") | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def from_read_session(read_session):
schema_type = read_session._pb.WhichOneof("schema")
if schema_type == "avro_schema":
return _AvroStreamParser(read_session)
elif schema_type == "arrow_schema":
return _ArrowStreamParser(read_session)
else:
raise TypeError(
"Unsupported schema type in read_session: {0}".format(schema_type)
) | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def from_read_rows_response(message):
schema_type = message._pb.WhichOneof("schema")
if schema_type == "avro_schema":
return _AvroStreamParser(message)
elif schema_type == "arrow_schema":
return _ArrowStreamParser(message)
else:
raise TypeError(
"Unsupported schema type in message: {0}".format(schema_type)
) | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def __init__(self, message):
"""Construct an _AvroStreamParser.
Args:
message (Union[
google.cloud.bigquery_storage_v1.types.ReadSession, \
google.cloud.bigquery_storage_v1.types.ReadRowsResponse, \
]):
Either the first message of data from a read rows stream or a
read session. Both types contain a oneof "schema" field, which
can be used to determine how to deserialize rows.
"""
if fastavro is None:
raise ImportError(_FASTAVRO_REQUIRED)
self._first_message = message
self._avro_schema_json = None
self._fastavro_schema = None
self._column_names = None | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def to_dataframe(self, message, dtypes=None):
"""Create a :class:`pandas.DataFrame` of rows in the page.
This method requires the pandas library to create a data frame and the
fastavro library to parse row messages.
.. warning::
DATETIME columns are not supported. They are currently parsed as
strings in the fastavro library.
Args:
message ( \
~google.cloud.bigquery_storage_v1.types.ReadRowsResponse \
):
A message containing Avro bytes to parse into a pandas DataFrame.
dtypes ( \
Map[str, Union[str, pandas.Series.dtype]] \
):
Optional. A dictionary of column names to pandas ``dtype``s. The
provided ``dtype`` is used when constructing the series for
the column specified. Otherwise, the default pandas behavior
is used.
Returns:
pandas.DataFrame:
A data frame of all rows in the stream.
"""
self._parse_avro_schema()
if dtypes is None:
dtypes = {}
columns = collections.defaultdict(list)
for row in self.to_rows(message):
for column in row:
columns[column].append(row[column])
for column in dtypes:
columns[column] = pandas.Series(columns[column], dtype=dtypes[column])
return pandas.DataFrame(columns, columns=self._column_names) | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
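A hedged usage sketch for to_dataframe above; parser is assumed to be an _AvroStreamParser instance and message a ReadRowsResponse carrying Avro bytes:

df = parser.to_dataframe(message, dtypes={"id": "int64"})  # force a dtype for one column
print(df.dtypes)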
def _parse_fastavro(self):
"""Convert parsed Avro schema to fastavro format."""
self._parse_avro_schema()
self._fastavro_schema = fastavro.parse_schema(self._avro_schema_json) | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def __init__(self, message):
"""Construct an _ArrowStreamParser.
Args:
message (Union[
google.cloud.bigquery_storage_v1.types.ReadSession, \
google.cloud.bigquery_storage_v1.types.ReadRowsResponse, \
]):
Either the first message of data from a read rows stream or a
read session. Both types contain a oneof "schema" field, which
can be used to determine how to deserialize rows.
"""
if pyarrow is None:
raise ImportError(_PYARROW_REQUIRED)
self._first_message = message
self._schema = None | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def to_rows(self, message):
record_batch = self._parse_arrow_message(message)
# Iterate through each column simultaneously, and make a dict from the
# row values
for row in zip(*record_batch.columns):
yield dict(zip(self._column_names, row)) | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def _parse_arrow_message(self, message):
self._parse_arrow_schema()
return pyarrow.ipc.read_record_batch(
pyarrow.py_buffer(message.arrow_record_batch.serialized_record_batch),
self._schema,
) | googleapis/python-bigquery-storage | [
73,
35,
73,
15,
1575936548
] |
def setUp(self):
with mock.patch(
'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.__init__',
new=mock_base_gcp_hook_default_project_id,
):
self.hook = CloudVisionHook(gcp_conn_id='test') | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_product_search_client_creation(self, mock_client, mock_get_creds, mock_client_info):
result = self.hook.get_conn()
mock_client.assert_called_once_with(
credentials=mock_get_creds.return_value, client_info=mock_client_info.return_value
)
assert mock_client.return_value == result
assert self.hook._client == result | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_create_productset_explicit_id(self, get_conn):
# Given
create_product_set_method = get_conn.return_value.create_product_set
create_product_set_method.return_value = None
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product_set = ProductSet()
# When
result = self.hook.create_product_set(
location=LOC_ID_TEST,
product_set_id=PRODUCTSET_ID_TEST,
product_set=product_set,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
# ProductSet ID was provided explicitly in the method call above, should be returned from the method
assert result == PRODUCTSET_ID_TEST
create_product_set_method.assert_called_once_with(
parent=parent,
product_set=product_set,
product_set_id=PRODUCTSET_ID_TEST,
retry=None,
timeout=None,
metadata=None,
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_create_productset_autogenerated_id(self, get_conn):
# Given
autogenerated_id = 'autogen-id'
response_product_set = ProductSet(
name=ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id)
)
create_product_set_method = get_conn.return_value.create_product_set
create_product_set_method.return_value = response_product_set
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product_set = ProductSet()
# When
result = self.hook.create_product_set(
location=LOC_ID_TEST, product_set_id=None, product_set=product_set, project_id=PROJECT_ID_TEST
)
# Then
# ProductSet ID was not provided in the method call above. Should be extracted from the API response
# and returned.
assert result == autogenerated_id
create_product_set_method.assert_called_once_with(
parent=parent,
product_set=product_set,
product_set_id=None,
retry=None,
timeout=None,
metadata=None,
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_create_productset_autogenerated_id_wrong_api_response(self, get_conn):
# Given
response_product_set = None
create_product_set_method = get_conn.return_value.create_product_set
create_product_set_method.return_value = response_product_set
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product_set = ProductSet()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.create_product_set(
location=LOC_ID_TEST,
product_set_id=None,
product_set=product_set,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
# API response was wrong (None) and thus ProductSet ID extraction should fail.
err = ctx.value
assert 'Unable to get name from response...' in str(err)
create_product_set_method.assert_called_once_with(
parent=parent,
product_set=product_set,
product_set_id=None,
retry=None,
timeout=None,
metadata=None,
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_get_productset(self, get_conn):
# Given
name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST)
response_product_set = ProductSet(name=name)
get_product_set_method = get_conn.return_value.get_product_set
get_product_set_method.return_value = response_product_set
# When
response = self.hook.get_product_set(
location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST
)
# Then
assert response
assert response == MessageToDict(response_product_set)
get_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_update_productset_no_explicit_name(self, get_conn):
# Given
product_set = ProductSet()
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = product_set
productset_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST
)
# When
result = self.hook.update_product_set(
location=LOC_ID_TEST,
product_set_id=PRODUCTSET_ID_TEST,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product_set)
update_product_set_method.assert_called_once_with(
product_set=ProductSet(name=productset_name),
metadata=None,
retry=None,
timeout=None,
update_mask=None,
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_update_productset_no_explicit_name_and_missing_params_for_constructed_name(
self, location, product_set_id, get_conn | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_update_productset_explicit_name_missing_params_for_constructed_name(
self, location, product_set_id, get_conn | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_update_productset_explicit_name_different_from_constructed(self, get_conn):
# Given
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = None
explicit_ps_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2
)
product_set = ProductSet(name=explicit_ps_name)
template_ps_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST
)
# When
# Location and product_set_id are passed in addition to a ProductSet with an explicit name,
# but both names differ (constructed != explicit).
# Should throw AirflowException in this case.
with pytest.raises(AirflowException) as ctx:
self.hook.update_product_set(
location=LOC_ID_TEST,
product_set_id=PRODUCTSET_ID_TEST,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
# self.assertIn("The required parameter 'project_id' is missing", str(err))
assert err
assert (
ERR_DIFF_NAMES.format(
explicit_name=explicit_ps_name,
constructed_name=template_ps_name,
label="ProductSet",
id_label="productset_id",
)
in str(err)
)
update_product_set_method.assert_not_called() | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_delete_productset(self, get_conn):
# Given
delete_product_set_method = get_conn.return_value.delete_product_set
delete_product_set_method.return_value = None
name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST)
# When
response = self.hook.delete_product_set(
location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST
)
# Then
assert response is None
delete_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_create_reference_image_explicit_id(self, get_conn):
# Given
create_reference_image_method = get_conn.return_value.create_reference_image
# When
result = self.hook.create_reference_image(
project_id=PROJECT_ID_TEST,
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
reference_image=REFERENCE_IMAGE_WITHOUT_ID_NAME,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
assert result == REFERENCE_IMAGE_ID_TEST
create_reference_image_method.assert_called_once_with(
parent=PRODUCT_NAME,
reference_image=REFERENCE_IMAGE_WITHOUT_ID_NAME,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
retry=None,
timeout=None,
metadata=None,
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_create_reference_image_autogenerated_id(self, get_conn):
# Given
create_reference_image_method = get_conn.return_value.create_reference_image
# When
result = self.hook.create_reference_image(
project_id=PROJECT_ID_TEST,
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
reference_image=REFERENCE_IMAGE_TEST,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
assert result == REFERENCE_IMAGE_GEN_ID_TEST
create_reference_image_method.assert_called_once_with(
parent=PRODUCT_NAME,
reference_image=REFERENCE_IMAGE_TEST,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
retry=None,
timeout=None,
metadata=None,
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_add_product_to_product_set(self, get_conn):
# Given
add_product_to_product_set_method = get_conn.return_value.add_product_to_product_set
# When
self.hook.add_product_to_product_set(
product_set_id=PRODUCTSET_ID_TEST,
product_id=PRODUCT_ID_TEST,
location=LOC_ID_TEST,
project_id=PROJECT_ID_TEST,
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
add_product_to_product_set_method.assert_called_once_with(
name=PRODUCTSET_NAME_TEST, product=PRODUCT_NAME_TEST, retry=None, timeout=None, metadata=None
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_remove_product_from_product_set(self, get_conn):
# Given
remove_product_from_product_set_method = get_conn.return_value.remove_product_from_product_set
# When
self.hook.remove_product_from_product_set(
product_set_id=PRODUCTSET_ID_TEST,
product_id=PRODUCT_ID_TEST,
location=LOC_ID_TEST,
project_id=PROJECT_ID_TEST,
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
remove_product_from_product_set_method.assert_called_once_with(
name=PRODUCTSET_NAME_TEST, product=PRODUCT_NAME_TEST, retry=None, timeout=None, metadata=None
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_annotate_image(self, annotator_client_mock):
# Given
annotate_image_method = annotator_client_mock.annotate_image
# When
self.hook.annotate_image(request=ANNOTATE_IMAGE_REQUEST)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
annotate_image_method.assert_called_once_with(
request=ANNOTATE_IMAGE_REQUEST, retry=None, timeout=None
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_batch_annotate_images(self, annotator_client_mock):
# Given
batch_annotate_images_method = annotator_client_mock.batch_annotate_images
# When
self.hook.batch_annotate_images(requests=BATCH_ANNOTATE_IMAGE_REQUEST)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
batch_annotate_images_method.assert_called_once_with(
requests=BATCH_ANNOTATE_IMAGE_REQUEST, retry=None, timeout=None
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_create_product_explicit_id(self, get_conn):
# Given
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = None
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
result = self.hook.create_product(
location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, product=product, project_id=PROJECT_ID_TEST
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
assert result == PRODUCT_ID_TEST
create_product_method.assert_called_once_with(
parent=parent,
product=product,
product_id=PRODUCT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_create_product_autogenerated_id(self, get_conn):
# Given
autogenerated_id = 'autogen-p-id'
response_product = Product(
name=ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id)
)
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = response_product
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
result = self.hook.create_product(
location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST
)
# Then
# Product ID was not provided in the method call above. Should be extracted from the API response
# and returned.
assert result == autogenerated_id
create_product_method.assert_called_once_with(
parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_create_product_autogenerated_id_wrong_name_in_response(self, get_conn):
# Given
wrong_name = 'wrong_name_not_a_correct_path'
response_product = Product(name=wrong_name)
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = response_product
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.create_product(
location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST
)
# Then
# API response was wrong (wrong name format) and thus ProductSet ID extraction should fail.
err = ctx.value
assert 'Unable to get id from name' in str(err)
create_product_method.assert_called_once_with(
parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_create_product_autogenerated_id_wrong_api_response(self, get_conn):
# Given
response_product = None
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = response_product
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.create_product(
location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST
)
# Then
# API response was wrong (None) and thus ProductSet ID extraction should fail.
err = ctx.value
assert 'Unable to get name from response...' in str(err)
create_product_method.assert_called_once_with(
parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_update_product_no_explicit_name(self, get_conn):
# Given
product = Product()
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = product
product_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
# When
result = self.hook.update_product(
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product)
update_product_method.assert_called_once_with(
product=Product(name=product_name), metadata=None, retry=None, timeout=None, update_mask=None
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_update_product_no_explicit_name_and_missing_params_for_constructed_name(
self, location, product_id, get_conn | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_update_product_explicit_name_missing_params_for_constructed_name(
self, location, product_id, get_conn | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_update_product_explicit_name_different_from_constructed(self, get_conn):
# Given
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = None
explicit_p_name = ProductSearchClient.product_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2
)
product = Product(name=explicit_p_name)
template_p_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
# When
# Location and product_id are passed in addition to a Product with an explicit name,
# but both names differ (constructed != explicit).
# Should throw AirflowException in this case.
with pytest.raises(AirflowException) as ctx:
self.hook.update_product(
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
assert err
assert (
ERR_DIFF_NAMES.format(
explicit_name=explicit_p_name,
constructed_name=template_p_name,
label="Product",
id_label="product_id",
)
in str(err)
)
update_product_method.assert_not_called() | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_delete_product(self, get_conn):
# Given
delete_product_method = get_conn.return_value.delete_product
delete_product_method.return_value = None
name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
# When
response = self.hook.delete_product(
location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, project_id=PROJECT_ID_TEST
)
# Then
assert response is None
delete_product_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_detect_text(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.text_detection
detect_text_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.text_detection(image=DETECT_TEST_IMAGE)
# Then
detect_text_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_detect_text_with_additional_properties(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.text_detection
detect_text_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.text_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
detect_text_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_detect_text_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.text_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.text_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_document_text_detection(self, annotator_client_mock):
# Given
document_text_detection_method = annotator_client_mock.document_text_detection
document_text_detection_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.document_text_detection(image=DETECT_TEST_IMAGE)
# Then
document_text_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_document_text_detection_with_additional_properties(self, annotator_client_mock):
# Given
document_text_detection_method = annotator_client_mock.document_text_detection
document_text_detection_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.document_text_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
document_text_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_detect_document_text_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.document_text_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.document_text_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_label_detection(self, annotator_client_mock):
# Given
label_detection_method = annotator_client_mock.label_detection
label_detection_method.return_value = AnnotateImageResponse(
label_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.label_detection(image=DETECT_TEST_IMAGE)
# Then
label_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_label_detection_with_additional_properties(self, annotator_client_mock):
# Given
label_detection_method = annotator_client_mock.label_detection
label_detection_method.return_value = AnnotateImageResponse(
label_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.label_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
label_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |
def test_label_detection_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.label_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.label_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err) | apache/incubator-airflow | [
29418,
12032,
29418,
869,
1428948298
] |