Dataset columns: function (string, lengths 11 to 56k), repo_name (string, lengths 5 to 60), features (sequence).
def switch_status(cls):
    """
    Reads line status and sends an alert if the status is different
    :return status: str, Door status, "OPEN" or "CLOSED"
    """
    response = cli_command("gpio dio")
    if "D1: DOUT=OFF, DIN=LOW" in response:
        if "D0: DOUT=ON" not in response:
            # Door is closed
            status = "CLOSED"
        else:
            # Door is open
            status = "OPEN"
    return status
digidotcom/transport_examples
[ 6, 12, 6, 2, 1448060105 ]
def send_alert(self, text):
    """
    :param text: str, Alert content
    :return:
    """
    for alert in self.alert_list:
        alert.send_alert(text)
digidotcom/transport_examples
[ 6, 12, 6, 2, 1448060105 ]
def monitor_switch(self):
    """
    Runs line monitoring and alerting in a loop
    :return:
    """
    while True:
        status = self.switch_status()
        if status != self.d1_status:
            print("WR31 door is: {0}".format(status))
            self.send_alert(status)
            self.d1_status = status
        time.sleep(.5)
digidotcom/transport_examples
[ 6, 12, 6, 2, 1448060105 ]
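The three functions above are methods of a single door-monitoring class that this extract does not show. A minimal skeleton of how they could hang together, assuming hypothetical names (DoorMonitor, the alert_list constructor argument); the method bodies are the rows above:

import time


class DoorMonitor(object):
    """Hypothetical container for switch_status, send_alert and monitor_switch."""

    def __init__(self, alert_list):
        self.alert_list = alert_list   # objects exposing send_alert(text)
        self.d1_status = "CLOSED"      # last observed door state

    # switch_status, send_alert and monitor_switch from the rows above
    # would be defined here.


if __name__ == '__main__':
    DoorMonitor(alert_list=[]).monitor_switch()  # loops forever, alerting on change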
def get_modules():
    path = os.path.join(settings.BASE_DIR, 'utils', 'upgrade')
    root, dirs, files = next(os.walk(path))
    return files
BirkbeckCTP/janeway
[ 143, 55, 143, 533, 1499695733 ]
def add_arguments(self, parser): """Adds arguments to Django's management command-line parser. :param parser: the parser to which the required arguments will be added :return: None """ parser.add_argument('--path', required=False)
BirkbeckCTP/janeway
[ 143, 55, 143, 533, 1499695733 ]
def __init__(self, vu_cfg, time_scale=0.9, **strat_cfg2):
    StratNaive.__init__(self, vu_cfg=vu_cfg, **strat_cfg2)
    self.time_scale = time_scale
flowersteam/naminggamesal
[ 4, 3, 4, 2, 1431465313 ]
def _get_project_webhooks(project):
    webhooks = []
    for webhook in project.webhooks.all():
        webhooks.append({
            "id": webhook.pk,
            "url": webhook.url,
            "key": webhook.key,
        })
    return webhooks
taigaio/taiga-back
[ 5787, 1103, 5787, 191, 1363706040 ]
def birthdays():
    dates = [
        date(1964, 12, 30), date(1964, 12, 31), date(1964, 1, 1),
        date(1964, 1, 2), date(1964, 1, 9), date(1964, 1, 10),
        date(1964, 1, 11), date(1964, 1, 12), date(1964, 1, 30),
        date(1964, 1, 31), date(1964, 2, 1), date(1964, 2, 2),
        date(1964, 2, 27), date(1964, 2, 28), date(1964, 2, 29),
        date(1964, 3, 1), date(1964, 3, 2)]

    attendees = []
    for d in dates:
        attendees.append(Attendee(
            placeholder=True,
            first_name='Born on',
            last_name=d.strftime('%B %-d, %Y'),
            ribbon=c.VOLUNTEER_RIBBON,
            staffing=True,
            birthdate=d))

    ids = []
    with Session() as session:
        session.bulk_insert(attendees)
        ids = [a.id for a in attendees]

    yield ids

    with Session() as session:
        session.query(Attendee).filter(Attendee.id.in_(ids)).delete(
            synchronize_session=False)
magfest/ubersystem
[ 44, 49, 44, 436, 1391223385 ]
def test_attendee_birthday_calendar(
        self, admin_attendee, year, birthdays, monkeypatch):
    if year:
        assert str(year)
        response = summary.Root().attendee_birthday_calendar(year=year)
    else:
        assert str(datetime.now(UTC).year)
        response = summary.Root().attendee_birthday_calendar()
    if isinstance(response, bytes):
        response = response.decode('utf-8')
    lines = response.strip().split('\n')
    assert len(lines) == (17 + 1)  # Extra line for the header
magfest/ubersystem
[ 44, 49, 44, 436, 1391223385 ]
def test_event_birthday_calendar(
        self, admin_attendee, epoch, eschaton, expected, birthdays,
        monkeypatch):
    monkeypatch.setattr(c, 'EPOCH', epoch)
    monkeypatch.setattr(c, 'ESCHATON', eschaton)
    response = summary.Root().event_birthday_calendar()
    if isinstance(response, bytes):
        response = response.decode('utf-8')
    lines = response.strip().split('\n')
    assert len(lines) == (expected + 1)  # Extra line for the header
magfest/ubersystem
[ 44, 49, 44, 436, 1391223385 ]
def grim_reaper(signum, frame):
    while True:
        try:
            pid, status = os.waitpid(
                -1,          # Wait for any child process
                os.WNOHANG   # Do not block and return EWOULDBLOCK error
            )
            print(
                'Child {pid} terminated with status {status}'
                '\n'.format(pid=pid, status=status)
            )
        except OSError:
            return

        if pid == 0:  # no more zombies
            return
hamishcunningham/fishy-wifi
[ 25, 14, 25, 13, 1420576253 ]
def __init__(self, server_address):
    # Create a listening socket
    self.listen_socket = listen_socket = socket.socket(
        self.address_family,
        self.socket_type
    )
    # Allow to reuse the same address
    listen_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # Bind
    listen_socket.bind(server_address)
    # Activate
    listen_socket.listen(self.request_queue_size)
    # Get server host name and port
    host, port = self.listen_socket.getsockname()[:2]
    self.server_name = socket.getfqdn(host)
    self.server_port = port
    # Return headers set by Web framework/Web application
    self.headers_set = []
hamishcunningham/fishy-wifi
[ 25, 14, 25, 13, 1420576253 ]
def serve_forever(self):
    listen_socket = self.listen_socket
    while True:
        try:
            self.client_connection, client_address = listen_socket.accept()
        except IOError as e:
            code, msg = e.args
            # restart 'accept' if it was interrupted
            if code == errno.EINTR:
                continue
            else:
                raise

        pid = os.fork()
        if pid == 0:  # child
            listen_socket.close()  # close child copy
            # Handle one request and close the client connection.
            self.handle_one_request()
            os._exit(0)
        else:  # parent
            self.client_connection.close()  # close parent copy
hamishcunningham/fishy-wifi
[ 25, 14, 25, 13, 1420576253 ]
def parse_request(self, text):
    request_line = text.splitlines()[0]
    request_line = request_line.rstrip('\r\n')
    # Break down the request line into components
    (self.request_method,  # GET
     self.path,            # /hello
     self.request_version  # HTTP/1.1
     ) = request_line.split()
hamishcunningham/fishy-wifi
[ 25, 14, 25, 13, 1420576253 ]
def start_response(self, status, response_headers, exc_info=None):
    # Add necessary server headers
    server_headers = [
        ('Date', 'Tue, 31 Mar 2015 12:54:48 GMT'),
        ('Server', 'WSGIServer 0.2'),
    ]
    self.headers_set = [status, response_headers + server_headers]
    # To adhere to WSGI specification the start_response must return
    # a 'write' callable. For simplicity's sake we'll ignore that detail
    # for now.
    # return self.finish_response
hamishcunningham/fishy-wifi
[ 25, 14, 25, 13, 1420576253 ]
def make_server(server_address, application):
    signal.signal(signal.SIGCHLD, grim_reaper)
    server = WSGIServer(server_address)
    server.set_app(application)
    return server
hamishcunningham/fishy-wifi
[ 25, 14, 25, 13, 1420576253 ]
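A minimal wiring sketch for the forking WSGI server above; the hello-world app and port 8888 are illustrative assumptions, not part of the source rows:

def app(environ, start_response):
    """A bare-bones WSGI application (hypothetical)."""
    status = '200 OK'
    response_headers = [('Content-Type', 'text/plain')]
    start_response(status, response_headers)
    return [b'Hello world!\n']


if __name__ == '__main__':
    SERVER_ADDRESS = (HOST, PORT) = '', 8888  # illustrative
    httpd = make_server(SERVER_ADDRESS, app)
    print('WSGIServer: Serving HTTP on port {0} ...'.format(PORT))
    httpd.serve_forever()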
def add_arguments(self, parser):
    parser.add_argument('args', nargs='*')
    parser.add_argument('--indiv-id')
    parser.add_argument('--cohort-id')
    parser.add_argument('--clear', action="store_true",
                        help="Whether to clear any previously-added VCF paths before adding this one")
    parser.add_argument('--load', action="store_true",
                        help="Whether to also load the VCF data, and not just record its path in the meta-data tables")
macarthur-lab/xbrowse
[ 139, 77, 139, 77, 1389113017 ]
def __init__(self, data, timedata_column='index', data_type='NAT',
             experiment_tag='No tag given', time_unit=None):
    """
    initialisation of a LabExperimBased object, based on a previously defined
    HydroData object.
    """
    HydroData.__init__(self, data, timedata_column=timedata_column,
                       data_type=data_type, experiment_tag=experiment_tag,
                       time_unit=time_unit)
cdemulde/wwdata
[ 10, 12, 10, 30, 1480075778 ]
def add_conc(self, column_name, x, y, new_name='default'):
    """
    calculates the concentration values of the given column and adds them as
    a new column to the DataFrame.

    Parameters
    ----------
    column_name : str
        column with values
    x : int
        ...
    y : int
        ...
    new_name : str
        name of the new column, default to 'column_name + mg/L'
    """
    if new_name == 'default':
        new_name = column_name + ' ' + 'mg/L'

    self.data[new_name] = self.data[column_name].values * x * y
cdemulde/wwdata
[ 10, 12, 10, 30, 1480075778 ]
def check_ph(self, ph_column='pH', thresh=0.4):
    """
    gives the maximal change in pH

    Parameters
    ----------
    ph_column : str
        column with pH-values, default to 'pH'
    thresh : float
        threshold value for warning, default to 0.4
    """
    dph = self.data[ph_column].max() - self.data[ph_column].min()
    if dph > thresh:
        wn.warn('Strong change in pH during experiment!')
    else:
        self.delta_ph = dph
cdemulde/wwdata
[ 10, 12, 10, 30, 1480075778 ]
def removal(self, columns):
    """
    total removal of nitrogen (1-(end_values/start_values))

    Parameters
    ----------
    columns : array of strings
    """
    inv = 0
    outv = 0
    indexes = self.time.values
    for column in columns:
        inv += self.data[column][indexes[0]]
    for column in columns:
        outv += self.data[column][indexes[-1]]
    removal = 1 - (outv / inv)
    return removal
cdemulde/wwdata
[ 10, 12, 10, 30, 1480075778 ]
def plot(self, columns, time_column='index'):
    """
    plots the selected columns against time

    Parameters
    ----------
    columns : array of strings
        columns to plot
    time_column : str
        column used as the time axis; default to 'index'
    """
    fig = plt.figure(figsize=(10, 6))
    ax = fig.add_subplot(111)
    if time_column == 'index':
        for column in columns:
            ax.plot(self.time, self.data[column], marker='o')
    else:
        for column in columns:
            ax.plot(self.data[time_column], self.data[column], marker='o')
    ax.legend()
    return fig, ax
cdemulde/wwdata
[ 10, 12, 10, 30, 1480075778 ]
def _print_removed_output(original, new, type_):
    """
    function printing the output of functions that remove datapoints.

    Parameters
    ----------
    original : int
        original length of the dataset
    new : int
        length of the new dataset
    type_ : str
        'removed' or 'dropped'
    """
    print('Original dataset:', original, 'datapoints')
    print('New dataset:', new, 'datapoints')
    print(original - new, 'datapoints', type_)
cdemulde/wwdata
[ 10, 12, 10, 30, 1480075778 ]
def setUp(self):
    super().setUp()
    self.project1 = self.env["project.project"].create({"name": "Project 1"})
    self.task1 = self.env["project.task"].create(
        {"name": "name1", "project_id": self.project1.id}
    )
    self.subtask1 = self.env["project.task"].create(
        {"name": "2", "project_id": self.project1.id, "parent_id": self.task1.id}
    )
    self.subtask2 = self.env["project.task"].create(
        {"name": "3", "project_id": self.project1.id, "parent_id": self.task1.id}
    )
OCA/project
[ 222, 689, 222, 48, 1402944045 ]
def sqlite_column_reflect_listener(inspector, table, column_info):
    """Adds parentheses around SQLite datetime defaults for utcnow."""
    if column_info['default'] == "datetime('now', 'utc')":
        column_info['default'] = utcnow_server_default
magfest/ubersystem
[ 44, 49, 44, 436, 1391223385 ]
def upgrade():
    op.add_column('panel_applicant',
                  sa.Column('other_pronouns', sa.Unicode(), server_default='', nullable=False))
    op.add_column('panel_applicant',
                  sa.Column('pronouns', sa.Unicode(), server_default='', nullable=False))
    op.add_column('panel_application',
                  sa.Column('is_loud', sa.Boolean(), server_default='False', nullable=False))
magfest/ubersystem
[ 44, 49, 44, 436, 1391223385 ]
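The row above shows only upgrade(); a minimal sketch of the matching downgrade(), assuming the usual Alembic convention of dropping the same columns in reverse order (not part of the source row):

def downgrade():
    # Hypothetical mirror of upgrade() above.
    op.drop_column('panel_application', 'is_loud')
    op.drop_column('panel_applicant', 'pronouns')
    op.drop_column('panel_applicant', 'other_pronouns')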
def test_watch_task(client):
    user = f.UserFactory.create()
    task = f.create_task(owner=user, milestone=None)
    f.MembershipFactory.create(project=task.project, user=user, is_admin=True)
    url = reverse("tasks-watch", args=(task.id,))

    client.login(user)
    response = client.post(url)

    assert response.status_code == 200
taigaio/taiga-back
[ 5787, 1103, 5787, 191, 1363706040 ]
def test_list_task_watchers(client):
    user = f.UserFactory.create()
    task = f.TaskFactory(owner=user)
    f.MembershipFactory.create(project=task.project, user=user, is_admin=True)
    f.WatchedFactory.create(content_object=task, user=user)
    url = reverse("task-watchers-list", args=(task.id,))

    client.login(user)
    response = client.get(url)

    assert response.status_code == 200
    assert response.data[0]['id'] == user.id
taigaio/taiga-back
[ 5787, 1103, 5787, 191, 1363706040 ]
def test_get_task_watchers(client):
    user = f.UserFactory.create()
    task = f.TaskFactory(owner=user)
    f.MembershipFactory.create(project=task.project, user=user, is_admin=True)
    url = reverse("tasks-detail", args=(task.id,))
    f.WatchedFactory.create(content_object=task, user=user)

    client.login(user)
    response = client.get(url)

    assert response.status_code == 200
    assert response.data['watchers'] == [user.id]
    assert response.data['total_watchers'] == 1
taigaio/taiga-back
[ 5787, 1103, 5787, 191, 1363706040 ]
def pick_w(self, m, voc, mem, context=[]):
    if m in voc.get_known_meanings():
        if m in list(mem['prefered words'].keys()):
            w = mem['prefered words'][m]
            if w not in voc.get_known_words(m=m):
                w = voc.get_random_known_w(m=m)
        else:
            w = voc.get_random_known_w(m=m)
    elif voc.get_unknown_words():
        w = voc.get_new_unknown_w()
    else:
        w = voc.get_random_known_w(option='min')
    return w
flowersteam/naminggamesal
[ 4, 3, 4, 2, 1431465313 ]
def __init__(self, *args, **kwargs):
    WordPreference.__init__(
        self, memory_policies=[{'mem_type': 'wordpreference_smart'}],
        *args, **kwargs)
flowersteam/naminggamesal
[ 4, 3, 4, 2, 1431465313 ]
def __init__(self, *args, **kwargs):
    WordPreference.__init__(
        self, memory_policies=[{'mem_type': 'wordpreference_last'}],
        *args, **kwargs)
flowersteam/naminggamesal
[ 4, 3, 4, 2, 1431465313 ]
def __init__(self, core, cfg, name):
    self.stats_reader = None
    self.reader = None
    super(Plugin, self).__init__(core, cfg, name)
    self.device = None
    try:
        self.cfg = cfg['volta_options']
        for key, value in self.cfg.items():
            if not isinstance(value, dict):
                logger.debug('Malformed VoltaConfig key: %s value %s', key, value)
                raise RuntimeError('Malformed VoltaConfig passed, key: %s. Should be dict' % key)
    except AttributeError:
        logger.error('Failed to read Volta config', exc_info=True)
    self.volta_core = VoltaCore(self.cfg)
yandex/yandex-tank
[ 2265, 289, 2265, 74, 1343041558 ]
def get_key():
    return __file__
yandex/yandex-tank
[ 2265, 289, 2265, 74, 1343041558 ]
def configure(self):
    self.volta_core.configure()
yandex/yandex-tank
[ 2265, 289, 2265, 74, 1343041558 ]
def get_stats_reader(self):
    if self.stats_reader is None:
        self.stats_reader = AndroidStatsReader()
    return self.stats_reader
yandex/yandex-tank
[ 2265, 289, 2265, 74, 1343041558 ]
def start_test(self):
    try:
        self.volta_core.start_test()
    # FIXME raise/catch appropriate exception here
    except:  # noqa: E722
        logger.info('Failed to start test of Android plugin', exc_info=True)
        return 1
yandex/yandex-tank
[ 2265, 289, 2265, 74, 1343041558 ]
def end_test(self, retcode):
    try:
        self.volta_core.end_test()
        uploaders = self.core.get_plugins_of_type(DataUploaderPlugin)
        for uploader in uploaders:
            response = uploader.lp_job.api_client.link_mobile_job(
                lp_key=uploader.lp_job.number,
                mobile_key=self.volta_core.uploader.jobno
            )
            logger.info(
                'Linked mobile job %s to %s for plugin: %s. Response: %s',
                self.volta_core.uploader.jobno,
                uploader.lp_job.number,
                uploader.backend_type,
                response
            )
    # FIXME raise/catch appropriate exception here
    except:  # noqa: E722
        logger.error('Failed to complete end_test of Android plugin', exc_info=True)
        retcode = 1
    return retcode
yandex/yandex-tank
[ 2265, 289, 2265, 74, 1343041558 ]
def post_process(self, retcode):
    try:
        self.volta_core.post_process()
    # FIXME raise/catch appropriate exception here
    except:  # noqa: E722
        logger.error('Failed to complete post_process of Android plugin', exc_info=True)
        retcode = 1
    return retcode
yandex/yandex-tank
[ 2265, 289, 2265, 74, 1343041558 ]
def loadMapFile(path, engine, callback=None, debug=True, extensions={}):
    """ load map file and get (an optional) callback if major stuff is done:
    - map creation
    - parsed imports
    - parsed layers
    - parsed cameras

    the callback will send both a string and a float (which shows
    the overall process), callback(string, float)

    @type engine: object
    @param engine: FIFE engine instance
    @type callback: function
    @param callback: callback for maploading progress
    @type debug: bool
    @param debug: flag to activate / deactivate print statements
    @rtype: object
    @return: FIFE map object
    """
    (filename, extension) = os.path.splitext(path)
    map_loader = mapFileMapping[extension[1:]](engine, callback, debug, extensions)
    map = map_loader.loadResource(path)
    if debug:
        print("--- Loading map took: ", map_loader.time_to_load, " seconds.")
    return map
fifengine/fifengine
[ 528, 74, 528, 44, 1363191556 ]
def _input(prompt=''):
    sys.stdout.write(prompt)
    sys.stdout.flush()
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        try:
            sock.connect((input_host, input_port))
            userdata = sock.recv(1024)
        except ConnectionRefusedError:
            userdata = b'<user-input-unavailable>'
    return userdata.decode()
lablup/sorna-agent
[ 17, 13, 17, 2, 1435420282 ]
def _raw_input(prompt=''):
    sys.stdout.write(prompt)
    sys.stdout.flush()
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect((input_host, input_port))
        userdata = sock.recv(1024)
    except socket.error:
        userdata = b'<user-input-unavailable>'
    finally:
        sock.close()
    return userdata.decode()
lablup/sorna-agent
[ 17, 13, 17, 2, 1435420282 ]
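These two helpers forward reads of user input to a TCP endpoint instead of stdin. A hedged sketch of how such replacements are typically installed before running user code; the host/port values are illustrative, and input_host/input_port must be defined by the surrounding agent:

import builtins

input_host, input_port = '127.0.0.1', 65000  # illustrative values

builtins.input = _input                # Python 3 builtin replaced
# __builtin__.raw_input = _raw_input   # Python 2 counterpart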
def __init__(self):
    QDialog.__init__(self, appdata.topwindow, Qt.WindowSystemMenuHint | Qt.WindowTitleHint)
    self.setWindowTitle(self.vis_label)

    layout = QVBoxLayout(self)

    box = QGroupBox(tm.main.tests_field, self)
    self.tests_list = QListWidget(box)
    boxlayout = QHBoxLayout(box)
    boxlayout.addWidget(self.tests_list)
    layout.addWidget(box)

    for n in os.listdir(self.tests_dir):
        if n.startswith(".") or not n.endswith('.py'):
            continue
        sp = os.path.splitext(n)
        item = QListWidgetItem(sp[0], self.tests_list)
        item.setCheckState(Qt.Unchecked)
TechInvestLab/dot15926
[ 33, 12, 33, 2, 1426868785 ]
def SelectAll(self):
    self.tests_list.SetChecked([x for x in xrange(self.tests_list.Count)])
TechInvestLab/dot15926
[ 33, 12, 33, 2, 1426868785 ]
def OnPrepare(self):
    if Choice(tm.main.tests_prepare_warning):
        for k in self.tests_list.CheckedStrings:
            self.report.AppendText(tm.main.tests_preparing.format(k))
            locals = {'mode': 'prepare'}
            ec = EnvironmentContext(None, locals)
            ec.ExecutePythonFile(os.path.join(self.tests_dir, k + '.py'))
            self.report.AppendText(tm.main.tests_preparing_done)
TechInvestLab/dot15926
[ 33, 12, 33, 2, 1426868785 ]
def draw_very_custom_shape(self, ctx, custom_shape, select_id=None):
    # similar to draw_custom_shape
    shape, batch, shader = custom_shape
    shader.bind()
    if select_id is not None:
        gpu.select.load_id(select_id)
    else:
        if self.is_highlight:
            color = (*self.color_highlight, self.alpha_highlight)
        else:
            color = (*self.color, self.alpha)
        shader.uniform_float("color", color)
    shape.glenable()
    shape.uniform_region(ctx)
    # shader.uniform_float('modelMatrix', self.matrix_world)
    with gpu.matrix.push_pop():
        gpu.matrix.multiply_matrix(self.matrix_world)
        batch.draw()
    bgl.glDisable(bgl.GL_BLEND)
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def invoke(self, ctx, event):
    self.init_value = self.target_get_value("offset") / self.scale_value
    coordz = self.project_mouse(ctx, event)
    if coordz is None:
        return {"CANCELLED"}
    self.init_coordz = coordz
    return {"RUNNING_MODAL"}
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def project_mouse(self, ctx, event):
    """Projecting mouse coords to local axis Z"""
    # logic from source/blender/editors/gizmo_library/gizmo_types/arrow3d_gizmo.c:gizmo_arrow_modal
    mouse = Vector((event.mouse_region_x, event.mouse_region_y))
    region = ctx.region
    region3d = ctx.region_data
    ray_orig = view3d_utils.region_2d_to_origin_3d(region, region3d, mouse)
    ray_norm = view3d_utils.region_2d_to_vector_3d(region, region3d, mouse)
    # 'arrow' origin and direction
    base = Vector((0, 0, 0))
    axis = Vector((0, 0, 1))
    # projection of the arrow to a plane, perpendicular to view ray
    axis_proj = axis - ray_norm * axis.dot(ray_norm)
    # intersection of the axis with the plane through view origin
    # perpendicular to the arrow projection
    coords = geometry.intersect_line_plane(base, axis, ray_orig, axis_proj)
    return coords.z
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def setup(self):
    self.custom_shape = self.new_custom_shape(type="TRIS", verts=X3DISC)
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def draw(self, ctx):
    self.refresh()
    self.draw_custom_shape(self.custom_shape)
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def setup(self):
    shader = DotsGizmoShader()
    self.custom_shape = shader, shader.batch(pos=((0, 0, 0),)), shader.prog
    self.use_draw_scale = False
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def draw(self, ctx):
    self.refresh()
    self.draw_very_custom_shape(ctx, self.custom_shape)
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def setup(self):
    shader = ExtrusionGuidesShader()
    self.custom_shape = shader, shader.batch(pos=((0, 0, 0), (0, 0, 1))), shader.prog
    self.use_draw_scale = False
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def draw(self, ctx):
    self.refresh()
    self.draw_very_custom_shape(ctx, self.custom_shape)
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def setup(self):
    pass
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def draw(self, ctx):
    self.refresh(ctx)
    self.draw_text(ctx)
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def poll(cls, ctx):
    obj = ctx.object
    return (
        obj
        and obj.type == "MESH"
        and obj.data.BIMMeshProperties.ifc_parameters.get("IfcExtrudedAreaSolid/Depth") is not None
    )
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def refresh(self, ctx):
    """updating gizmos"""
    target = ctx.object
    basis = target.matrix_world.normalized()
    self.handle.matrix_basis = basis
    self.guides.matrix_basis = basis
IfcOpenShell/IfcOpenShell
[ 1191, 546, 1191, 377, 1439197394 ]
def __init__(self, key):
    self.key = key
    self.prio = random.randint(0, 1000000000)
    self.size = 1
    self.left = None
    self.right = None
indy256/codelibrary
[ 1742, 506, 1742, 96, 1300394309 ]
def size(treap):
    return 0 if treap is None else treap.size
indy256/codelibrary
[ 1742, 506, 1742, 96, 1300394309 ]
def merge(left, right):
    if left is None:
        return right
    if right is None:
        return left
    if left.prio > right.prio:
        left.right = merge(left.right, right)
        left.update()
        return left
    else:
        right.left = merge(left, right.left)
        right.update()
        return right
indy256/codelibrary
[ 1742, 506, 1742, 96, 1300394309 ]
def remove(root, key):
    left, right = split(root, key)
    return merge(left, split(right, key + 1)[1])
indy256/codelibrary
[ 1742, 506, 1742, 96, 1300394309 ]
def print_treap(root):
    def dfs_print(root):
        if root is None:
            return
        dfs_print(root.left)
        print(str(root.key) + ' ', end='')
        dfs_print(root.right)

    dfs_print(root)
    print()
indy256/codelibrary
[ 1742, 506, 1742, 96, 1300394309 ]
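remove and merge above rely on split and a node update() method that this extract does not include. A minimal sketch of both, following the standard treap design (key-based split, size maintenance); treat it as an assumption, not the repository's exact code:

def update(self):
    # Would live on the node class; matches the size() helper above.
    self.size = 1 + size(self.left) + size(self.right)


def split(root, min_right_key):
    # Split into trees holding (keys < min_right_key, keys >= min_right_key);
    # remove() uses this to cut out the half-open interval [key, key + 1).
    if root is None:
        return None, None
    if root.key >= min_right_key:
        left, root.left = split(root.left, min_right_key)
        root.update()
        return left, root
    root.right, right = split(root.right, min_right_key)
    root.update()
    return root, right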
def raise_for_error(f):
    """
    Wrapper method to parse any error response and raise the ErrorResponse
    instance if an error is encountered.

    :param f:
    :return:
    """
    def inner(*args, **kwargs):
        warn('`raise_for_error` is deprecated and will not process any response content.')
        return f(*args, **kwargs)
        # e = ErrorResponse.load(content)
        # e.raise_for_error()
        # return content
    return inner
ziplokk1/python-amazon-mws-tools
[ 7, 7, 7, 4, 1483121407 ]
def inner(*args, **kwargs):
    warn('`raise_response_for_error` is deprecated and will not process any response content.')
    return f(*args, **kwargs)
ziplokk1/python-amazon-mws-tools
[ 7, 7, 7, 4, 1483121407 ]
def _apply_mask(y_true, sample_weight, masked_tokens, dtype):
    if sample_weight is None:
        sample_weight = tf.ones_like(y_true, dtype)
    else:
        sample_weight = tf.cast(sample_weight, dtype)

    for token in masked_tokens:
        mask = tf.cast(tf.not_equal(y_true, token), dtype)
        sample_weight = sample_weight * mask

    return sample_weight
google-research/federated
[ 505, 161, 505, 11, 1600124947 ]
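A small usage sketch for _apply_mask; the tensor values and the choice of 0 as the padding token are illustrative assumptions:

import tensorflow as tf

y_true = tf.constant([[3, 7, 0, 0]])  # assume 0 is the padding token
weights = _apply_mask(y_true, None, masked_tokens=[0], dtype=tf.float32)
# weights == [[1., 1., 0., 0.]]: masked positions get zero weight, so
# padding never contributes to the metric.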
def __init__(self, masked_tokens=None, name='num_tokens', dtype=tf.int64):
    self._masked_tokens = masked_tokens or []
    super().__init__(name, dtype)
google-research/federated
[ 505, 161, 505, 11, 1600124947 ]
def get_config(self):
    config = super().get_config()
    config['masked_tokens'] = tuple(self._masked_tokens)
    return config
google-research/federated
[ 505, 161, 505, 11, 1600124947 ]
def __init__(self, masked_tokens=None, name='accuracy', dtype=None):
    self._masked_tokens = masked_tokens or []
    super().__init__(name, dtype=dtype)
google-research/federated
[ 505, 161, 505, 11, 1600124947 ]
def _copy_vars(v_list):
    """Copy variables in v_list."""
    t_list = []
    for v in v_list:
        t_list.append(tf.identity(v))
    return t_list
deepmind/deepmind-research
[ 11519, 2366, 11519, 161, 1547546053 ]
def _scale_vars(s, v_list):
    """Scale all variables in v_list by s."""
    return [s * v for v in v_list]
deepmind/deepmind-research
[ 11519, 2366, 11519, 161, 1547546053 ]
def _compute_reg_grads(gen_grads, disc_vars):
    """Compute gradients norm (this is an upper-bound of the full-batch norm)."""
    gen_norm = tf.accumulate_n([tf.reduce_sum(u * u) for u in gen_grads])
    disc_reg_grads = tf.gradients(gen_norm, disc_vars)
    return disc_reg_grads
deepmind/deepmind-research
[ 11519, 2366, 11519, 161, 1547546053 ]
def update_model(model, disc_grads, gen_grads, disc_opt, gen_opt, global_step,
                 update_scale):
    """Update model with gradients."""
    disc_vars, gen_vars = model.get_variables()
    with tf.control_dependencies(gen_grads + disc_grads):
        disc_update_op = disc_opt.apply_gradients(
            zip(_scale_vars(update_scale, disc_grads), disc_vars))
        gen_update_op = gen_opt.apply_gradients(
            zip(_scale_vars(update_scale, gen_grads), gen_vars),
            global_step=global_step)
        update_op = tf.group([disc_update_op, gen_update_op])
    return update_op
deepmind/deepmind-research
[ 11519, 2366, 11519, 161, 1547546053 ]
def sample_fn(x):
    return utils.optimise_and_sample(x, module=model, data=None,
                                     is_training=False)[0]
deepmind/deepmind-research
[ 11519, 2366, 11519, 161, 1547546053 ]
def test_avseth(self):
    # Base case: gas
    # Subbing with: brine
    sub = fluidsub.avseth_fluidsub(vp=vp_gas,
                                   vs=vs_gas,
                                   rho=rho_gas,
                                   phi=phi,
                                   rhof1=rhohc,
                                   rhof2=rhow,
                                   kmin=37000000000,
                                   kf1=khc,
                                   kf2=kw)
    self.assertAlmostEqual(sub[0], vp_brine, places=-1)   # Cannot match :(
    self.assertAlmostEqual(sub[1], vs_brine, places=-1)   # Cannot match :(
    self.assertAlmostEqual(sub[2], rho_brine, places=-1)  # Cannot match :(
agile-geoscience/agilegeo
[ 242, 114, 242, 30, 1378483924 ]
def seconds_to_timestamp(seconds):
    return pd.Timestamp(seconds, unit='s', tz='UTC')
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def str_to_seconds(s):
    """
    Convert a pandas-intelligible string to (integer) seconds since UTC.

    >>> from pandas import Timestamp
    >>> (Timestamp('2014-01-01') - Timestamp(0)).total_seconds()
    1388534400.0
    >>> str_to_seconds('2014-01-01')
    1388534400
    """
    return timedelta_to_integral_seconds(pd.Timestamp(s, tz='UTC') - EPOCH)
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def check_algo_results(test,
                       results,
                       expected_transactions_count=None,
                       expected_order_count=None,
                       expected_positions_count=None,
                       sid=None):
    if expected_transactions_count is not None:
        txns = flatten_list(results["transactions"])
        test.assertEqual(expected_transactions_count, len(txns))

    if expected_positions_count is not None:
        raise NotImplementedError

    if expected_order_count is not None:
        # de-dup orders on id, because orders are put back into perf packets
        # whenever a txn is filled
        orders = set([order['id'] for order
                      in flatten_list(results["orders"])])
        test.assertEqual(expected_order_count, len(orders))
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def assert_single_position(test, zipline):
    output, transaction_count = drain_zipline(test, zipline)

    if 'expected_transactions' in test.zipline_test_config:
        test.assertEqual(
            test.zipline_test_config['expected_transactions'],
            transaction_count
        )
    else:
        test.assertEqual(
            test.zipline_test_config['order_count'],
            transaction_count
        )

    # the final message is the risk report, the second to
    # last is the final day's results. Positions is a list of
    # dicts.
    closing_positions = output[-2]['daily_perf']['positions']

    # confirm that all orders were filled.
    # iterate over the output updates, overwriting
    # orders when they are updated. Then check the status on all.
    orders_by_id = {}
    for update in output:
        if 'daily_perf' in update:
            if 'orders' in update['daily_perf']:
                for order in update['daily_perf']['orders']:
                    orders_by_id[order['id']] = order

    for order in itervalues(orders_by_id):
        test.assertEqual(
            order['status'],
            ORDER_STATUS.FILLED,
            "")

    test.assertEqual(
        len(closing_positions),
        1,
        "Portfolio should have one position."
    )

    sid = test.zipline_test_config['sid']
    test.assertEqual(
        closing_positions[0]['sid'],
        sid,
        "Portfolio should have one position in " + str(sid)
    )

    return output, transaction_count
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def security_list_copy():
    old_dir = security_list.SECURITY_LISTS_DIR
    new_dir = tempfile.mkdtemp()
    try:
        for subdir in os.listdir(old_dir):
            shutil.copytree(os.path.join(old_dir, subdir),
                            os.path.join(new_dir, subdir))
        with patch.object(security_list, 'SECURITY_LISTS_DIR', new_dir), \
                patch.object(security_list, 'using_copy', True, create=True):
            yield
    finally:
        shutil.rmtree(new_dir, True)
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def all_pairs_matching_predicate(values, pred):
    """
    Return an iterator of all pairs, (v0, v1) from values such that

    `pred(v0, v1) == True`

    Parameters
    ----------
    values : iterable
    pred : function

    Returns
    -------
    pairs_iterator : generator
        Generator yielding pairs matching `pred`.

    Examples
    --------
    >>> from zipline.testing import all_pairs_matching_predicate
    >>> from operator import eq, lt
    >>> list(all_pairs_matching_predicate(range(5), eq))
    [(0, 0), (1, 1), (2, 2), (3, 3), (4, 4)]
    >>> list(all_pairs_matching_predicate("abcd", lt))
    [('a', 'b'), ('a', 'c'), ('a', 'd'), ('b', 'c'), ('b', 'd'), ('c', 'd')]
    """
    return filter(lambda pair: pred(*pair), product(values, repeat=2))
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def all_subindices(index):
    """
    Return all valid sub-indices of a pandas Index.
    """
    return (
        index[start:stop]
        for start, stop in product_upper_triangle(range(len(index) + 1))
    )
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
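all_subindices depends on product_upper_triangle, which this extract omits. A minimal sketch consistent with its use above, built on all_pairs_matching_predicate; the include_diagonal flag is an assumption:

import operator


def product_upper_triangle(values, include_diagonal=False):
    # Pairs (v0, v1) with v0 < v1, or v0 <= v1 when include_diagonal is True.
    return all_pairs_matching_predicate(
        values,
        operator.le if include_diagonal else operator.lt,
    )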
def make_trade_data_for_asset_info(dates,
                                   asset_info,
                                   price_start,
                                   price_step_by_date,
                                   price_step_by_sid,
                                   volume_start,
                                   volume_step_by_date,
                                   volume_step_by_sid,
                                   frequency,
                                   writer=None):
    """
    Convert the asset info dataframe into a dataframe of trade data for each
    sid, and write to the writer if provided. Write NaNs for locations where
    assets did not exist. Return a dict of the dataframes, keyed by sid.
    """
    trade_data = {}
    sids = asset_info.index

    price_sid_deltas = np.arange(len(sids), dtype=float64) * price_step_by_sid
    price_date_deltas = (np.arange(len(dates), dtype=float64) *
                         price_step_by_date)
    prices = (price_sid_deltas + as_column(price_date_deltas)) + price_start

    volume_sid_deltas = np.arange(len(sids)) * volume_step_by_sid
    volume_date_deltas = np.arange(len(dates)) * volume_step_by_date
    volumes = volume_sid_deltas + as_column(volume_date_deltas) + volume_start

    for j, sid in enumerate(sids):
        start_date, end_date = asset_info.loc[sid, ['start_date', 'end_date']]
        # Normalize here so that we still generate non-NaN values on the
        # minutes for an asset's last trading day.
        for i, date in enumerate(dates.normalize()):
            if not (start_date <= date <= end_date):
                prices[i, j] = 0
                volumes[i, j] = 0

        df = pd.DataFrame(
            {
                "open": prices[:, j],
                "high": prices[:, j],
                "low": prices[:, j],
                "close": prices[:, j],
                "volume": volumes[:, j],
            },
            index=dates,
        )

        if writer:
            writer.write_sid(sid, df)

        trade_data[sid] = df

    return trade_data
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def check_arrays(x, y, err_msg='', verbose=True, check_dtypes=True):
    """
    Wrapper around np.testing.assert_array_equal that also verifies that inputs
    are ndarrays.

    See Also
    --------
    np.assert_array_equal
    """
    assert type(x) == type(y), "{x} != {y}".format(x=type(x), y=type(y))
    assert x.dtype == y.dtype, "{x.dtype} != {y.dtype}".format(x=x, y=y)

    if isinstance(x, LabelArray):
        # Check that both arrays have missing values in the same locations...
        assert_array_equal(
            x.is_missing(),
            y.is_missing(),
            err_msg=err_msg,
            verbose=verbose,
        )
        # ...then check the actual values as well.
        x = x.as_string_array()
        y = y.as_string_array()
    elif x.dtype.kind in 'mM':
        x_isnat = isnat(x)
        y_isnat = isnat(y)
        assert_array_equal(
            x_isnat,
            y_isnat,
            err_msg="NaTs not equal",
            verbose=verbose,
        )
        # Fill NaTs with zero for comparison.
        x = np.where(x_isnat, np.zeros_like(x), x)
        y = np.where(y_isnat, np.zeros_like(y), y)

    return assert_array_equal(x, y, err_msg=err_msg, verbose=verbose)
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def __getattribute__(self, name):
    raise UnexpectedAttributeAccess(name)
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def create_minute_bar_data(minutes, sids):
    length = len(minutes)
    for sid_idx, sid in enumerate(sids):
        yield sid, pd.DataFrame(
            {
                'open': np.arange(length) + 10 + sid_idx,
                'high': np.arange(length) + 15 + sid_idx,
                'low': np.arange(length) + 8 + sid_idx,
                'close': np.arange(length) + 10 + sid_idx,
                'volume': 100 + sid_idx,
            },
            index=minutes,
        )
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def write_daily_data(tempdir, sim_params, sids, trading_calendar):
    path = os.path.join(tempdir.path, "testdaily.bcolz")
    BcolzDailyBarWriter(path, trading_calendar,
                        sim_params.start_session,
                        sim_params.end_session).write(
        create_daily_bar_data(sim_params.sessions, sids),
    )
    return path
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def write_bcolz_minute_data(trading_calendar, days, path, data):
    BcolzMinuteBarWriter(
        path,
        trading_calendar,
        days[0],
        days[-1],
        US_EQUITIES_MINUTES_PER_DAY
    ).write(data)
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def create_daily_df_for_asset(trading_calendar, start_day, end_day,
                              interval=1):
    days = trading_calendar.sessions_in_range(start_day, end_day)
    days_count = len(days)
    days_arr = np.arange(days_count) + 2

    df = pd.DataFrame(
        {
            "open": days_arr + 1,
            "high": days_arr + 2,
            "low": days_arr - 1,
            "close": days_arr,
            "volume": days_arr * 100,
        },
        index=days,
    )

    if interval > 1:
        # only keep every 'interval' rows
        for idx, _ in enumerate(days_arr):
            if (idx + 1) % interval != 0:
                df["open"].iloc[idx] = 0
                df["high"].iloc[idx] = 0
                df["low"].iloc[idx] = 0
                df["close"].iloc[idx] = 0
                df["volume"].iloc[idx] = 0

    return df
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def create_data_portal_from_trade_history(asset_finder, trading_calendar,
                                          tempdir, sim_params, trades_by_sid):
    if sim_params.data_frequency == "daily":
        path = os.path.join(tempdir.path, "testdaily.bcolz")
        writer = BcolzDailyBarWriter(
            path, trading_calendar,
            sim_params.start_session,
            sim_params.end_session
        )
        writer.write(
            trades_by_sid_to_dfs(trades_by_sid, sim_params.sessions),
        )

        equity_daily_reader = BcolzDailyBarReader(path)

        return DataPortal(
            asset_finder, trading_calendar,
            first_trading_day=equity_daily_reader.first_trading_day,
            equity_daily_reader=equity_daily_reader,
        )
    else:
        minutes = trading_calendar.minutes_in_range(
            sim_params.first_open,
            sim_params.last_close
        )

        length = len(minutes)
        assets = {}

        for sidint, trades in iteritems(trades_by_sid):
            opens = np.zeros(length)
            highs = np.zeros(length)
            lows = np.zeros(length)
            closes = np.zeros(length)
            volumes = np.zeros(length)

            for trade in trades:
                # put them in the right place
                idx = minutes.searchsorted(trade.dt)

                opens[idx] = trade.open_price * 1000
                highs[idx] = trade.high * 1000
                lows[idx] = trade.low * 1000
                closes[idx] = trade.close_price * 1000
                volumes[idx] = trade.volume

            assets[sidint] = pd.DataFrame({
                "open": opens,
                "high": highs,
                "low": lows,
                "close": closes,
                "volume": volumes,
                "dt": minutes
            }).set_index("dt")

        write_bcolz_minute_data(
            trading_calendar,
            sim_params.sessions,
            tempdir.path,
            assets
        )

        equity_minute_reader = BcolzMinuteBarReader(tempdir.path)

        return DataPortal(
            asset_finder, trading_calendar,
            first_trading_day=equity_minute_reader.first_trading_day,
            equity_minute_reader=equity_minute_reader,
        )
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def __init__(self, env, trading_calendar=None, first_trading_day=None):
    if trading_calendar is None:
        trading_calendar = get_calendar("NYSE")
    super(FakeDataPortal, self).__init__(env.asset_finder,
                                         trading_calendar,
                                         first_trading_day)
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def get_history_window(self, assets, end_dt, bar_count, frequency, field,
                       data_frequency, ffill=True):
    if frequency == "1d":
        end_idx = \
            self.trading_calendar.all_sessions.searchsorted(end_dt)
        days = self.trading_calendar.all_sessions[
            (end_idx - bar_count + 1):(end_idx + 1)
        ]

        df = pd.DataFrame(
            np.full((bar_count, len(assets)), 100.0),
            index=days,
            columns=assets
        )

        return df
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def __init__(self, asset_finder, trading_calendar, first_trading_day=None):
    super(FetcherDataPortal, self).__init__(asset_finder, trading_calendar,
                                            first_trading_day)
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def _get_daily_window_for_sid(self, asset, field, days_in_window,
                              extra_slot=True):
    return np.arange(days_in_window, dtype=np.float64)
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def __init__(self, url='sqlite:///:memory:', equities=_default_equities,
             **frames):
    self._url = url
    self._eng = None
    if equities is self._default_equities:
        equities = make_simple_equity_info(
            list(map(ord, 'ABC')),
            pd.Timestamp(0),
            pd.Timestamp('2015'),
        )

    frames['equities'] = equities
    self._frames = frames
    self._eng = None  # set in enter and exit
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def __exit__(self, *excinfo):
    assert self._eng is not None, '_eng was not set in __enter__'
    self._eng.dispose()
    self._eng = None
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def __init__(self, url='sqlite:///:memory:', finder_cls=AssetFinder,
             **frames):
    self._finder_cls = finder_cls
    super(tmp_asset_finder, self).__init__(url=url, **frames)
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def empty_asset_finder():
    """Context manager for creating an empty asset finder.

    See Also
    --------
    empty_assets_db
    tmp_assets_db
    tmp_asset_finder
    """
    return tmp_asset_finder(equities=None)
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def __init__(self, load=None, *args, **kwargs):
    super(tmp_trading_env, self).__init__(*args, **kwargs)
    self._load = load
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]
def empty_trading_env():
    return tmp_trading_env(equities=None)
bartosh/zipline
[ 12, 5, 12, 2, 1474228866 ]