function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
sequence
def my_retried_method(count_func):
    """Invoke *count_func* once, then always raise a ClientError.

    Test helper simulating a boto call that fails with an error code
    which should NOT be treated as retryable.
    """
    count_func()
    # "Another Error" is deliberately not a throttling/retryable code.
    exception = ClientError(
        error_response={"Error": {"Code": "Another Error", "Message": "Foo"}},
        operation_name="DescribeStacks",
    )
    raise exception
marco-hoyer/cfn-sphere
[ 82, 26, 82, 4, 1438784024 ]
def test_with_boto_retry_does_not_retry_without_exception(self):
    """A successful call must run exactly once and return its value."""
    call_counter = Mock()

    @util.with_boto_retry(max_retries=1, pause_time_multiplier=1)
    def my_retried_method(count_func):
        count_func()
        return "foo"

    self.assertEqual("foo", my_retried_method(call_counter))
    self.assertEqual(1, call_counter.call_count)
marco-hoyer/cfn-sphere
[ 82, 26, 82, 4, 1438784024 ]
def test_get_pretty_stack_outputs_returns_proper_table(self): outputs = [ { 'OutputKey': 'key1', 'OutputValue': 'value1', 'Description': 'desc1' }, { 'OutputKey': 'key2', 'OutputValue': 'value2', 'Description': 'desc2' }, { 'OutputKey': 'key3', 'OutputValue': 'value3', 'Description': 'desc3' } ] expected = """+--------+--------+
marco-hoyer/cfn-sphere
[ 82, 26, 82, 4, 1438784024 ]
def test_strip_string_strips_string(self):
    """strip_string must truncate long input to 100 chars plus '...'."""
    long_input = "sfsdklgashgslkadghkafhgaknkbndkjfbnwurtqwhgsdnkshGLSAKGKLDJFHGSKDLGFLDFGKSDFLGKHAsdjdghskjdhsdcxbvwerA323"
    truncated = util.strip_string(long_input)
    self.assertEqual(
        "sfsdklgashgslkadghkafhgaknkbndkjfbnwurtqwhgsdnkshGLSAKGKLDJFHGSKDLGFLDFGKSDFLGKHAsdjdghskjdhsdcxbvwe...",
        truncated)
marco-hoyer/cfn-sphere
[ 82, 26, 82, 4, 1438784024 ]
def test_get_git_repository_remote_url_returns_none_if_no_repository_present(self, repo_mock):
    """No git repository anywhere up the tree -> None."""
    repo_mock.side_effect = InvalidGitRepositoryError
    result = util.get_git_repository_remote_url(tempfile.mkdtemp())
    self.assertEqual(None, result)
marco-hoyer/cfn-sphere
[ 82, 26, 82, 4, 1438784024 ]
def test_get_git_repository_remote_url_returns_repo_url(self, repo_mock):
    """The origin remote URL of the repo at the given dir is returned."""
    expected_url = "http://config.repo.git"
    repo_mock.return_value.remotes.origin.url = expected_url
    self.assertEqual(expected_url,
                     util.get_git_repository_remote_url(tempfile.mkdtemp()))
marco-hoyer/cfn-sphere
[ 82, 26, 82, 4, 1438784024 ]
def test_get_git_repository_remote_url_returns_repo_url_from_parent_dir(self, repo_mock):
    """If the dir itself is no repo, the parent repo's URL is returned."""
    expected_url = "http://config.repo.git"
    parent_repo = Mock()
    parent_repo.remotes.origin.url = expected_url
    # First lookup (the dir itself) fails, second (the parent) succeeds.
    repo_mock.side_effect = [InvalidGitRepositoryError, parent_repo]
    self.assertEqual(expected_url,
                     util.get_git_repository_remote_url(tempfile.mkdtemp()))
marco-hoyer/cfn-sphere
[ 82, 26, 82, 4, 1438784024 ]
def test_get_git_repository_remote_url_returns_none_for_empty_string_working_dir(self):
    """An empty working-dir path must yield None instead of raising."""
    self.assertEqual(None, util.get_git_repository_remote_url(""))
marco-hoyer/cfn-sphere
[ 82, 26, 82, 4, 1438784024 ]
def test_kv_list_to_dict(self):
    """A list of 'key=value' strings is parsed into a dict."""
    result = util.kv_list_to_dict(["k1=v1", "k2=v2"])
    self.assertEqual({"k1": "v1", "k2": "v2"}, result)
marco-hoyer/cfn-sphere
[ 82, 26, 82, 4, 1438784024 ]
def _get_model(shape, dtype, a_min, a_max):
    """Build a Relay clip (ReLU-style) graph for an integer dtype input."""
    info = np.iinfo(dtype)
    # Clip bounds must be representable in the integer dtype.
    assert a_min >= info.min and a_max <= info.max
    a = relay.var("a", shape=shape, dtype=dtype)
    return relay.clip(a, a_min=a_min, a_max=a_max)
dmlc/tvm
[ 9142, 2938, 9142, 595, 1476310828 ]
def test_relu(dtype):
    """Compare clip/ReLU offloaded to the NPU against the host result."""
    trials = [
        ((1, 4, 4, 4), 65, 178, "uint8"),
        ((1, 8, 4, 2), 1, 254, "uint8"),
        ((1, 16), 12, 76, "uint8"),
        ((1, 4, 4, 4), 65, 125, "int8"),
        ((1, 8, 4, 2), -100, 100, "int8"),
        ((1, 16), -120, -20, "int8"),
    ]
    np.random.seed(0)
    info = np.iinfo(dtype)
    for shape, a_min, a_max, trial_dtype in trials:
        # Only run the trials matching the parametrized dtype.
        if trial_dtype != dtype:
            continue
        data = np.random.randint(
            low=info.min, high=info.max + 1, size=shape, dtype=dtype)
        inputs = {"a": tvm.nd.array(data)}
        outputs = []
        for npu in [False, True]:
            model = _get_model(inputs["a"].shape, dtype, a_min, a_max)
            mod = tei.make_module(model, {})
            outputs.append(tei.build_and_run(mod, inputs, 1, {}, npu=npu))
        tei.verify(outputs, dtype, 1)
dmlc/tvm
[ 9142, 2938, 9142, 595, 1476310828 ]
def sanitize(txt):
    """Return *txt* with all non-printable characters removed."""
    return ''.join(c for c in txt if c in printable)
yhalpern/anchorExplorer
[ 17, 3, 17, 2, 1415238212 ]
def getEdges(t, outfile):
    # Depth-first walk of the tree: write one sanitized
    # "parent<TAB>child" code pair per line, then recurse into the child.
    # NOTE: Python 2 "print >>file" syntax.
    for c in t.children:
        print >>outfile, sanitize(t.code+'\t'+c.code)
        getEdges(c, outfile)
yhalpern/anchorExplorer
[ 17, 3, 17, 2, 1415238212 ]
def unsupported_versions_1979():
    """Unsupported python versions for itertags
    3.7.0 - 3.7.2 and 3.8.0a1
    - https://github.com/streamlink/streamlink/issues/1979
    - https://bugs.python.org/issue34294
    """
    v = sys.version_info
    if v.major != 3:
        return False
    # 3.7.0 - 3.7.2
    broken_37 = v.minor == 7 and v.micro <= 2
    # 3.8.0a1
    broken_38a1 = (v.minor == 8 and v.micro == 0
                   and v.releaselevel == 'alpha' and v.serial <= 1)
    return broken_37 or broken_38a1
beardypig/streamlink
[ 3, 1, 3, 4, 1475754729 ]
def test_itertags_single_text(self):
    """itertags finds the single <title> tag with its text and no attrs."""
    title = list(itertags(self.test_html, "title"))
    # BUG FIX: assertTrue(len(title), 1) passed the 1 as the failure
    # *message*, so the count was never actually checked.
    self.assertEqual(len(title), 1)
    self.assertEqual(title[0].tag, "title")
    self.assertEqual(title[0].text, "Title")
    self.assertEqual(title[0].attributes, {})
beardypig/streamlink
[ 3, 1, 3, 4, 1475754729 ]
def test_itertags_multi_attrs(self):
    """itertags yields every <meta> tag with its attributes and no text."""
    metas = list(itertags(self.test_html, "meta"))
    # BUG FIX: assertTrue(len(metas), 3) passed the 3 as the failure
    # *message*, so the count was never actually checked.
    self.assertEqual(len(metas), 3)
    self.assertTrue(all(meta.tag == "meta" for meta in metas))
    self.assertEqual(metas[0].text, None)
    self.assertEqual(metas[1].text, None)
    self.assertEqual(metas[2].text, None)
    self.assertEqual(metas[0].attributes, {"property": "og:type", "content": "website"})
    self.assertEqual(metas[1].attributes, {"property": "og:url", "content": "http://test.se/"})
    self.assertEqual(metas[2].attributes, {"property": "og:site_name", "content": "Test"})
beardypig/streamlink
[ 3, 1, 3, 4, 1475754729 ]
def test_no_end_tag(self):
    """Self-closing tags (<link>) are found with attributes and no text."""
    links = list(itertags(self.test_html, "link"))
    # BUG FIX: assertTrue(len(links), 1) passed the 1 as the failure
    # *message*, so the count was never actually checked.
    self.assertEqual(len(links), 1)
    self.assertEqual(links[0].tag, "link")
    self.assertEqual(links[0].text, None)
    self.assertEqual(links[0].attributes,
                     {"rel": "stylesheet",
                      "type": "text/css",
                      "href": "https://test.se/test.css"})
beardypig/streamlink
[ 3, 1, 3, 4, 1475754729 ]
def test_properties():
    """Sphere stores its radius exactly and survives pickling."""
    rng = np.random.default_rng(5)
    for _ in range(100):
        radius = rng.normal(0.0, 0.3)  # negative allowed
        sphere = batoid.Sphere(radius)
        assert sphere.R == radius
        do_pickle(sphere)
jmeyers314/batoid
[ 13, 9, 13, 21, 1485797644 ]
def test_sag():
    # Spot-check Sphere.sag() against the closed-form sphere sag
    # R*(1 - sqrt(1 - r^2/R^2)) for scalar, vectorized, and
    # non-contiguous inputs.
    rng = np.random.default_rng(57)
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphere = batoid.Sphere(R)
        for j in range(10):
            x = rng.uniform(-0.7*abs(R), 0.7*abs(R))
            y = rng.uniform(-0.7*abs(R), 0.7*abs(R))
            result = sphere.sag(x, y)
            np.testing.assert_allclose(
                result,
                R*(1-np.sqrt(1.0-(x*x + y*y)/R/R))
            )
            # Check that it returned a scalar float and not an array
            assert isinstance(result, float)
        # Check 0,0
        np.testing.assert_allclose(sphere.sag(0, 0), 0.0, rtol=0, atol=1e-17)
        # Check vectorization
        x = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
        y = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
        np.testing.assert_allclose(
            sphere.sag(x, y),
            R*(1-np.sqrt(1.0-(x*x + y*y)/R/R))
        )
        # Make sure non-unit stride arrays also work
        np.testing.assert_allclose(
            sphere.sag(x[::5,::2], y[::5,::2]),
            R*(1-np.sqrt(1.0-(x*x + y*y)/R/R))[::5,::2]
        )
        do_pickle(sphere)
jmeyers314/batoid
[ 13, 9, 13, 21, 1485797644 ]
def test_normal():
    # Check Sphere.normal() against the analytic unit surface normal
    # for scalar, vectorized, and non-contiguous inputs.
    rng = np.random.default_rng(577)
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphere = batoid.Sphere(R)
        for j in range(10):
            x = rng.uniform(-0.7*abs(R), 0.7*abs(R))
            y = rng.uniform(-0.7*abs(R), 0.7*abs(R))
            result = sphere.normal(x, y)
            r = np.hypot(x, y)
            rat = r/R
            dzdr = rat/np.sqrt(1-rat*rat)
            nz = 1/np.sqrt(1+dzdr*dzdr)
            normal = np.array([-x/r*dzdr*nz, -y/r*dzdr*nz, nz])
            np.testing.assert_allclose(result, normal)
        # Check 0,0
        np.testing.assert_equal(sphere.normal(0, 0), np.array([0, 0, 1]))
        # Check vectorization
        x = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
        y = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
        r = np.hypot(x, y)
        rat = r/R
        dzdr = rat/np.sqrt(1-rat*rat)
        nz = 1/np.sqrt(1+dzdr*dzdr)
        normal = np.dstack([-x/r*dzdr*nz, -y/r*dzdr*nz, nz])
        np.testing.assert_allclose(
            sphere.normal(x, y),
            normal
        )
        # Make sure non-unit stride arrays also work
        np.testing.assert_allclose(
            sphere.normal(x[::5,::2], y[::5,::2]),
            normal[::5, ::2]
        )
jmeyers314/batoid
[ 13, 9, 13, 21, 1485797644 ]
def test_intersect():
    # Shoot vertical rays at a sphere shifted down by 1 and check the
    # intersection lands on sag(x, y) - 1, for both an explicit and the
    # default intersect coordinate transform.
    rng = np.random.default_rng(5772)
    size = 10_000
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphereCoordSys = batoid.CoordSys(origin=[0, 0, -1])
        sphere = batoid.Sphere(R)
        x = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        y = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        z = np.full_like(x, -2*abs(R))
        # If we shoot rays straight up, then it's easy to predict the
        # intersection
        vx = np.zeros_like(x)
        vy = np.zeros_like(x)
        vz = np.ones_like(x)
        rv = batoid.RayVector(x, y, z, vx, vy, vz)
        np.testing.assert_allclose(rv.z, -2*abs(R))
        rv2 = batoid.intersect(sphere, rv.copy(), sphereCoordSys)
        assert rv2.coordSys == sphereCoordSys
        rv2 = rv2.toCoordSys(batoid.CoordSys())
        np.testing.assert_allclose(rv2.x, x)
        np.testing.assert_allclose(rv2.y, y)
        np.testing.assert_allclose(rv2.z, sphere.sag(x, y)-1, rtol=0, atol=1e-9)
        # Check default intersect coordTransform
        rv2 = rv.copy().toCoordSys(sphereCoordSys)
        batoid.intersect(sphere, rv2)
        assert rv2.coordSys == sphereCoordSys
        rv2 = rv2.toCoordSys(batoid.CoordSys())
        np.testing.assert_allclose(rv2.x, x)
        np.testing.assert_allclose(rv2.y, y)
        np.testing.assert_allclose(rv2.z, sphere.sag(x, y)-1, rtol=0, atol=1e-9)
jmeyers314/batoid
[ 13, 9, 13, 21, 1485797644 ]
def test_reflect():
    # Reflect near-vertical rays off a sphere and verify the law of
    # reflection (equal angles) and coplanarity of incident ray,
    # reflected ray, and surface normal.
    rng = np.random.default_rng(57721)
    size = 10_000
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphere = batoid.Sphere(R)
        x = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        y = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        z = np.full_like(x, -2*abs(R))
        vx = rng.uniform(-1e-5, 1e-5, size=size)
        vy = rng.uniform(-1e-5, 1e-5, size=size)
        vz = np.full_like(x, 1)
        rv = batoid.RayVector(x, y, z, vx, vy, vz)
        rvr = batoid.reflect(sphere, rv.copy())
        rvr2 = sphere.reflect(rv.copy())
        rays_allclose(rvr, rvr2)
        normal = sphere.normal(rvr.x, rvr.y)
        # Test law of reflection
        a0 = np.einsum("ad,ad->a", normal, rv.v)[~rvr.failed]
        a1 = np.einsum("ad,ad->a", normal, -rvr.v)[~rvr.failed]
        np.testing.assert_allclose(
            a0, a1,
            rtol=0, atol=1e-12
        )
        # Test that rv.v, rvr.v and normal are all in the same plane
        np.testing.assert_allclose(
            np.einsum(
                "ad,ad->a",
                np.cross(normal, rv.v),
                rv.v
            )[~rvr.failed],
            0.0,
            rtol=0, atol=1e-12
        )
jmeyers314/batoid
[ 13, 9, 13, 21, 1485797644 ]
def test_refract():
    # Refract near-vertical rays through a sphere between two constant
    # media and verify Snell's law and coplanarity of incident ray,
    # refracted ray, and surface normal.
    rng = np.random.default_rng(577215)
    size = 10_000
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphere = batoid.Sphere(R)
        m0 = batoid.ConstMedium(rng.normal(1.2, 0.01))
        m1 = batoid.ConstMedium(rng.normal(1.3, 0.01))
        x = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        y = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        z = np.full_like(x, -2*abs(R))
        vx = rng.uniform(-1e-5, 1e-5, size=size)
        vy = rng.uniform(-1e-5, 1e-5, size=size)
        # Velocity normalized so |v| = 1/n0 in the incident medium.
        vz = np.sqrt(1-vx*vx-vy*vy)/m0.n
        rv = batoid.RayVector(x, y, z, vx, vy, vz)
        rvr = batoid.refract(sphere, rv.copy(), m0, m1)
        rvr2 = sphere.refract(rv.copy(), m0, m1)
        rays_allclose(rvr, rvr2)
        normal = sphere.normal(rvr.x, rvr.y)
        # Test Snell's law
        s0 = np.sum(np.cross(normal, rv.v*m0.n)[~rvr.failed], axis=-1)
        s1 = np.sum(np.cross(normal, rvr.v*m1.n)[~rvr.failed], axis=-1)
        np.testing.assert_allclose(
            m0.n*s0, m1.n*s1,
            rtol=0, atol=1e-9
        )
        # Test that rv.v, rvr.v and normal are all in the same plane
        np.testing.assert_allclose(
            np.einsum(
                "ad,ad->a",
                np.cross(normal, rv.v),
                rv.v
            )[~rvr.failed],
            0.0,
            rtol=0, atol=1e-12
        )
jmeyers314/batoid
[ 13, 9, 13, 21, 1485797644 ]
def test_ne():
    """Distinct surfaces must compare unequal to each other."""
    surfaces = [
        batoid.Sphere(1.0),
        batoid.Sphere(2.0),
        batoid.Plane(),
    ]
    all_obj_diff(surfaces)
jmeyers314/batoid
[ 13, 9, 13, 21, 1485797644 ]
def test_fail():
    """Rays that miss the sphere are flagged failed; hits are not."""
    sphere = batoid.Sphere(1.0)
    # Too far to side
    miss = batoid.RayVector(0, 10, 0, 0, 0, -1)
    np.testing.assert_equal(
        batoid.intersect(sphere, miss.copy()).failed, np.array([True]))
    # This one passes
    hit = batoid.RayVector(0, 0, 0, 0, 0, -1)
    np.testing.assert_equal(
        batoid.intersect(sphere, hit.copy()).failed, np.array([False]))
jmeyers314/batoid
[ 13, 9, 13, 21, 1485797644 ]
def adc_response(msg, isjson, code=200, json_encoded=False):
    """Build a Flask response in JSON or HTML with a matching status line.

    When *json_encoded* is true, *msg* is used as the body verbatim;
    otherwise it is rendered through the appropriate template.
    """
    if json_encoded:
        body = msg
    else:
        template = 'response.json' if isjson else 'response.html'
        body = render_template(template, msg=msg)
    resp = make_response(body)
    # Only these three codes get an explicit reason phrase.
    reasons = {200: 'OK', 400: 'Bad Request', 401: 'Unauthorized'}
    if code in reasons:
        resp.status = reasons[code]
    resp.status_code = code
    resp.headers['Content-Type'] = 'application/json' if isjson else 'text/html; charset=utf-8'
    return resp
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def adc_response_text(body, code=200):
    """Return *body* as a plain-text response with the given status code."""
    resp = make_response(body)
    resp.status_code = code
    resp.headers['Content-Type'] = 'text/plain; charset=utf-8'
    return resp
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def adc_response_Q_data(result):
    """Return the question text data as plain text (404 when absent)."""
    if result is None:
        return adc_response_text("Not Found\r\n", 404)
    return adc_response_text(result.text, 200)
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def log_get_or_delete(username=None, fetch_num=100, when=None, delete=False):
    """Fetch (or delete) up to *fetch_num* Log entries, newest first.

    Optionally filter by *username* and by age (*when* is a timedelta).
    Returns a list of dicts; deleted entries report only their date.
    """
    query = Log.query(ancestor=log_key()).order(-Log.date)
    if username is not None:
        query = query.filter(Log.username == username)
    if when is not None:
        before = datetime.datetime.now() - when
        query = query.filter(Log.date > before)
    results = []
    for entry in query.fetch(fetch_num):
        if delete:
            item = {'date': gae_datetime_JST(entry.date)}
            entry.key.delete()
        else:
            item = {'date': gae_datetime_JST(entry.date),
                    'username': entry.username,
                    'what': entry.what}
        results.append(item)
    return results
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def adc_change_password(salt, username, users, attr, priv_admin=False):
    """Change a user's password; admins may change anyone's.

    Returns (ok, message).
    """
    required = ('password_old', 'password_new1', 'password_new2')
    if not all(k in attr for k in required):
        return False, "error"
    if not priv_admin:
        # Non-admins must prove knowledge of the current password.
        u = adc_login(salt, username, attr['password_old'], users)
        if u is None:
            return False, "password mismatched"
    if attr['password_new1'] != attr['password_new2']:
        return False, "new password is not same"
    if change_password(username, attr['password_new1'].encode('utf-8'), salt):
        return True, "password changed"
    return False, "password change failed"
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def adc_get_user_list(users):
    """List usernames: locally-defined users first, then database users."""
    names = [u[0] for u in users]
    names.extend(get_username_list())
    return names
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def update_Q_data(q_num, text, author="DASymposium", year=DEFAULT_YEAR):
    """Update an existing question's data after validating *text*.

    Returns (ok, num_or_msg, size, line_num).
    """
    # Validate and normalize the question text.
    (size, line_num, line_mat, msg, ok) = numberlink.read_input_data(text)
    if not ok:
        return (False, "Error: syntax error in Q data\n"+msg, None, None)
    text2 = numberlink.generate_Q_data(size, line_num, line_mat)
    # Fetch the existing entity.
    res = get_user_Q_data(q_num, author, year)
    if res is None:
        # BUG FIX: the original fell through and crashed with
        # AttributeError on res.text when the question did not exist;
        # report an error tuple (same shape as the syntax-error case).
        return (False, "Error: Q data not found", None, None)
    res.text = text2
    res.rows = size[1]
    res.cols = size[0]
    res.linenum = line_num
    res.put()
    return (True, 1, size, line_num)
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def get_Q_author_all():
    """Build a table mapping contest question number -> author name."""
    qla = ndb.Key(QuestionListAll, 'master', parent=qdata_key()).get()
    if qla is None:
        return None
    # Question numbers are 1-based, so reserve index 0.
    authors = ['']*(len(qla.qs)+1)
    for qn, q_key in enumerate(qla.qs, start=1):
        # q.qnum is the registration number, not the contest number,
        # hence the positional numbering here.
        authors[qn] = q_key.get().author
    return authors
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def get_user_Q_data(q_num, author, year=DEFAULT_YEAR, fetch_num=99):
    """Fetch one Question entity by number and author (None if absent)."""
    userinfo = get_userinfo(author)
    root = qdata_key(year) if userinfo is None else userinfo.key
    return ndb.Key(Question, str(q_num), parent=root).get()
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def admin_Q_list_get():
    """Return the contest question list (admin view), or '' if unset."""
    qla = ndb.Key(QuestionListAll, 'master', parent=qdata_key()).get()
    return '' if qla is None else qla.text_admin
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def admin_Q_list_delete():
    """Delete the contest question list."""
    ndb.Key(QuestionListAll, 'master', parent=qdata_key()).delete()
    return "DELETE Q-list"
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def menu_post_A(username):
    """Return HTML with one answer-upload link per contest question."""
    qla = ndb.Key(QuestionListAll, 'master', parent=qdata_key()).get()
    if qla is None:
        return ''
    links = []
    for num, i in enumerate(qla.text_user.splitlines(), start=1):
        links.append('<a href="/A/%s/Q/%d">post answer %s</a><br />\n' % (username, num, i))
    return ''.join(links)
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def get_user_Q_all(author, html=None):
    """Return a listing of an author's questions, as text or as HTML."""
    userinfo = get_userinfo(author)
    root = qdata_key() if userinfo is None else userinfo.key
    query = Question.query(ancestor=root).order(Question.qnum)
    lines = []
    for i in query.fetch():
        if html is None:
            lines.append("Q%d SIZE %dX%d LINE_NUM %d (%s)\n" % (i.qnum, i.cols, i.rows, i.linenum, i.author))
        else:
            url = '/user/%s/Q/%d' % (author, i.qnum)
            lines.append('<a href="%s">Q%d SIZE %dX%d LINE_NUM %d (%s)</a><br />\n' % (url, i.qnum, i.cols, i.rows, i.linenum, i.author))
    return ''.join(lines)
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def get_admin_A_all():
    """List every answer in the database; first line is the count."""
    q = Answer.query(ancestor=userlist_key()).fetch()
    lines = [str(len(q)) + "\n"]
    for i in q:
        dt = gae_datetime_JST(i.date)
        lines.append("A%02d (%s) %s\n" % (i.anum, i.owner, dt))
    return ''.join(lines)
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def get_A_data(a_num=None, username=None):
    """Fetch answer data from the database.

    With a_num=None, return every matching answer; with a numeric a_num
    return that single answer (or None). Returns (ok, data_or_msg, root).
    """
    if username is None:
        root = userlist_key()
    else:
        userinfo = get_userinfo(username)
        if userinfo is None:
            return False, "ERROR: user not found: %s" % username, None
        root = userinfo.key
    if a_num is not None:
        # Single answer lookup by key.
        return True, ndb.Key(Answer, str(a_num), parent=root).get(), root
    return True, Answer.query(ancestor=root).fetch(), root
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def put_A_data(a_num, username, text, cpu_sec=None, mem_byte=None, misc_text=None):
    """Store answer data in the database (registered even when incorrect)."""
    msg = ""
    # Fetch the question data for this answer number.
    ret, q_text = get_Q_data_text(a_num)
    if not ret:
        msg = "Error in Q%d data: " % a_num + q_text
        return False, msg
    # Reject a duplicate answer by the same user.
    ret, q, root = get_A_data(a_num, username)
    if ret==True and q is not None:
        msg += "ERROR: duplicated answer\n";
        return False, msg
    # Verify the answer against the question.
    judges, msg = numberlink.check_A_data(text, q_text)
    q = 0.0
    if judges[0] != True:
        msg += "Error in answer A%d\n" % a_num
        check_A = False
    else:
        check_A = True # correct
        q = judges[1] # solution quality
        msg += "Quality factor = %1.19f\n" % q
    # Register in the database — incorrect answers are stored too.
    a = Answer( parent = root,
                id = str(a_num),
                anum = a_num,
                text = text,
                owner = username,
                cpu_sec = cpu_sec,
                mem_byte = mem_byte,
                misc_text = misc_text,
                result = msg[-1499:], # field has a length limit; keep only the tail
                judge = int(check_A),
                q_factor = q )
    a_key = a.put()
    return True, msg
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def get_or_delete_A_data(a_num=None, username=None, delete=False):
    """Fetch or delete answer data. Returns (ok, list_or_error_msg)."""
    ret, q, root = get_A_data(a_num=a_num, username=username)
    if not ret:
        return False, q  # q is the error message here
    if q is None:
        return ret, []
    # With a_num=None, q is already a list of entities.
    items = q if a_num is None else [q]
    result = []
    if delete:
        # Clear the supplementary info before deleting the entities.
        get_or_delete_A_info(a_num=a_num, username=username, delete=True)
        for i in items:
            result.append("DELETE A%d" % i.anum)
            i.key.delete()
    else:
        for i in items:
            result.append("GET A%d" % i.anum)
            result.append(i.text)
    return True, result
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def get_or_delete_A_info(a_num=None, username=None, delete=False):
    """Fetch or clear supplementary answer info (cpu/mem/misc).

    Returns (ok, message, results).
    """
    r, a, root = get_A_data(a_num, username)
    if not r:
        return False, a, None
    if a_num is None:
        entities = a
    else:
        if a is None:
            return True, "A%d not found" % a_num, []
        entities = [a]
    results = []
    for i in entities:
        if delete:
            results.append({'anum': i.anum})
            i.cpu_sec = None
            i.mem_byte = None
            i.misc_text = None
            i.put()
        else:
            info = i.to_dict()
            # The answer text itself is not part of the "info" view.
            del info['text']
            results.append(info)
    method = 'DELETE' if delete else 'GET'
    a_num2 = 0 if a_num is None else a_num
    msg = "%s A%d info %d" % (method, a_num2, len(entities))
    return True, msg, results
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def create_user(username, password, displayname, uid, gid, salt):
    """Register a new user in the database with a salted password hash."""
    entity = UserInfo(parent=userlist_key(),
                      id=username,
                      username=username,
                      password=hashed_password(username, password, salt),
                      displayname=displayname,
                      uid=uid,
                      gid=gid)
    entity.put()
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def get_username_list():
    """Return the list of usernames stored in the database."""
    q = UserInfo.query(ancestor=userlist_key()).fetch()
    return [u.username for u in q]
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def delete_user(username):
    """Delete *username* from the database.

    Returns the number of users deleted (0 or 1).
    """
    userinfo = get_userinfo(username)
    if userinfo is None:
        return 0
    userinfo.key.delete()
    # BUG FIX: the original ended with an unreachable "return n" that
    # referenced an undefined name; removed.
    return 1
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def calc_score_all():
    """Compute the score board.

    Aggregates, per question and per user: correctness points, quality
    points (10 points split in proportion to each user's Q-factor), and
    an authoring bonus.  NOTE: Python 2 (dict.iteritems).
    """
    authors = get_Q_author_all()
    q_factors = {}
    q_point = {}
    ok_point = {}
    bonus_point = {}
    result = {}
    misc = {}
    query = Answer.query(ancestor=userlist_key())
    q = query.fetch()
    all_numbers = {}
    all_users = {}
    for i in q:
        anum = 'A%02d' % i.anum
        username = i.owner
        all_numbers[anum] = 1
        all_users[username] = 1
        # correctness points
        if not(anum in ok_point):
            ok_point[anum] = {}
        ok_point[anum][username] = i.judge
        # quality points
        if not(anum in q_factors):
            q_factors[anum] = {}
        q_factors[anum][username] = i.q_factor
        # authoring bonus points
        if i.judge in (0,1) and authors[i.anum] == username:
            if not(anum in bonus_point):
                bonus_point[anum] = {}
            bonus_point[anum][username] = i.judge
        # result (log message)
        if not(anum in result):
            result[anum] = {}
        result[anum][username] = i.result
        # (misc) date, cpu_sec, mem_byte, misc_text
        if not(anum in misc):
            misc[anum] = {}
        misc[anum][username] = [i.date, i.cpu_sec, i.mem_byte, i.misc_text]
    # quality points: 10 points split by each user's share of the total
    # Q-factor for that question
    q_pt = 10.0
    for anum, values in q_factors.iteritems(): # per question number
        qf_total = 0.0 # sum of Q-factors
        for user, qf in values.iteritems():
            qf_total += qf
        for user, qf in values.iteritems():
            if qf_total == 0.0:
                tmp = 0.0
            else:
                tmp = q_pt * qf / qf_total
            if not anum in q_point:
                q_point[anum] = {}
            q_point[anum][user] = tmp
    # aggregate into the score board
    tmp = ['']*(len(all_numbers) + 1)
    i = 0
    for anum in sorted(all_numbers.keys()):
        tmp[i] = anum
        i += 1
    tmp[i] = 'TOTAL'
    score_board = {'/header/': tmp} # header row
    for user in sorted(all_users.keys()):
        if not(user in score_board):
            score_board[user] = [0]*(len(all_numbers) + 1)
        i = 0
        ptotal = 0.0
        for anum in sorted(all_numbers.keys()):
            p = 0.0
            if user in ok_point[anum]:
                p += ok_point[anum][user]
            if user in q_point[anum]:
                p += q_point[anum][user]
            if anum in bonus_point and user in bonus_point[anum]:
                p += bonus_point[anum][user]
            score_board[user][i] = p
            ptotal += p
            i += 1
        score_board[user][i] = ptotal
    return score_board, ok_point, q_point, bonus_point, q_factors, result, misc
dasadc/conmgr
[ 2, 2, 2, 3, 1427178829 ]
def from_port_specs(source, dest):
    """from_port_specs(source: PortSpec, dest: PortSpec) -> Connection
    Static method that creates a Connection given source and destination
    ports.
    """
    connection = Connection()
    connection.source = copy.copy(source)
    connection.destination = copy.copy(dest)
    return connection
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def fromID(id):
    """fromTypeID(id: int) -> Connection
    Static method that creates a Connection given an id.
    """
    connection = Connection()
    connection.id = id
    connection.source.endPoint = PortEndPoint.Source
    connection.destination.endPoint = PortEndPoint.Destination
    return connection
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def __init__(self, *args, **kwargs): """__init__() -> Connection Initializes source and destination ports.
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def __copy__(self): """__copy__() -> Connection - Returns a clone of self.
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
    """Clone via the DB layer, then restore the Connection/Port classes."""
    clone = DBConnection.do_copy(self, new_ids, id_scope, id_remap)
    clone.__class__ = Connection
    for port in clone.ports:
        Port.convert(port)
    return clone
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def convert(_connection):
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def add_port(self, port):
    # Thin delegation to the auto-generated db layer.
    self.db_add_port(port)
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def _set_sourceId(self, id):
    """ _set_sourceId(id : int) -> None
    Sets this connection source id. It updates both self.source.moduleId
    and self.source.id. Do not use this function, use sourceId
    property: c.sourceId = id
    """
    self.source.moduleId = id
    self.source.id = id
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def _get_destinationId(self):
    """ _get_destinationId() -> int
    Returns the module id of dest port. Do not use this function, use
    the destinationId property: c.destinationId
    """
    return self.destination.moduleId
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def _get_source(self):
    """_get_source() -> Port
    Returns source port. Do not use this function, use source property:
    c.source
    """
    try:
        return self.db_get_port_by_type('source')
    except KeyError:
        # No source port registered yet.
        return None
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def _get_destination(self): """_get_destination() -> Port Returns destination port. Do not use this function, use destination property: c.destination """
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def _set_destination(self, dest):
    """_set_destination(dest: Port) -> None
    Sets this connection destination port. Do not use this function,
    use destination property instead: c.destination = dest
    """
    # Remove any existing destination port before adding the new one.
    try:
        self.db_delete_port(self.db_get_port_by_type('destination'))
    except KeyError:
        pass
    if dest is not None:
        self.db_add_port(dest)
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def __str__(self):
    """__str__() -> str - Returns a string representation of a
    Connection object.
    """
    rep = "<connection id='%s'>%s%s</connection>"
    parts = (str(self.id), str(self.source), str(self.destination))
    return rep % parts
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def __eq__(self, other):
    # Equality: same concrete type and matching endpoints; id is not
    # part of equality.
    if type(other) != type(self):
        return False
    # NOTE(review): this compares self.dest, while the rest of this
    # class uses 'destination' — confirm a 'dest' property exists.
    return (self.source == other.source and
            self.dest == other.dest)
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def create_connection(self, id_scope=IdScope()):
    # Test helper: build a String->Float "value" connection with ids
    # drawn from *id_scope*.
    # NOTE: Python 2 long literals (21L/20L); the mutable default
    # IdScope() is shared across calls, which these tests rely on.
    from vistrails.core.vistrail.port import Port
    from vistrails.core.modules.basic_modules import identifier as basic_pkg
    source = Port(id=id_scope.getNewId(Port.vtType),
                  type='source',
                  moduleId=21L,
                  moduleName='String',
                  name='value',
                  signature='(%s:String)' % basic_pkg)
    destination = Port(id=id_scope.getNewId(Port.vtType),
                       type='destination',
                       moduleId=20L,
                       moduleName='Float',
                       name='value',
                       signature='(%s:Float)' % basic_pkg)
    connection = Connection(id=id_scope.getNewId(Connection.vtType),
                            ports=[source, destination])
    return connection
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def test_serialization(self):
    """A Connection must round-trip through XML serialization intact."""
    import vistrails.core.db.io
    original = self.create_connection()
    xml_str = vistrails.core.db.io.serialize(original)
    restored = vistrails.core.db.io.unserialize(xml_str, Connection)
    self.assertEquals(original, restored)
    self.assertEquals(original.id, restored.id)
VisTrails/VisTrails
[ 98, 44, 98, 181, 1344454638 ]
def make_cookie_key(key):
    """Namespace *key* under the after-signup cookie prefix."""
    prefix = 'after_signup_'
    return prefix + str(key)
canvasnetworks/canvas
[ 56, 15, 56, 3, 1447125133 ]
def configure():
    """Load Google Cloud DNS settings from the cloud_host config."""
    conf = JBoxCfg.get('cloud_host')
    JBoxGCD.INSTALLID = conf['install_id']
    JBoxGCD.REGION = conf['region']
    JBoxGCD.DOMAIN = conf['domain']
JuliaLang/JuliaBox
[ 183, 50, 183, 60, 1383806846 ]
def domain():
    """Return the configured DNS domain, configuring lazily on first use."""
    if JBoxGCD.DOMAIN is None:
        JBoxGCD.configure()
    return JBoxGCD.DOMAIN
JuliaLang/JuliaBox
[ 183, 50, 183, 60, 1383806846 ]
def connect():
    """Return a per-thread Google Cloud DNS client, creating it lazily."""
    conn = getattr(JBoxGCD.threadlocal, 'conn', None)
    if conn is None:
        JBoxGCD.configure()
        creds = GoogleCredentials.get_application_default()
        conn = build("dns", "v1", credentials=creds)
        JBoxGCD.threadlocal.conn = conn
    return conn
JuliaLang/JuliaBox
[ 183, 50, 183, 60, 1383806846 ]
def add_cname(name, value):
    """Create a DNS record pointing *name* at *value*.

    NOTE(review): despite the function name, this adds a type 'A'
    record, not a CNAME — confirm that is intended.
    """
    record = {'rrdatas': [value],
              'kind': 'dns#resourceRecordSet',
              'type': 'A',
              'name': name,
              'ttl': 300}
    change = {'kind': 'dns#change', 'additions': [record]}
    JBoxGCD.connect().changes().create(
        project=JBoxGCD.INSTALLID,
        managedZone=JBoxGCD.REGION,
        body=change).execute()
JuliaLang/JuliaBox
[ 183, 50, 183, 60, 1383806846 ]
def detailed_balance_factor(self):
    r"""Returns the detailed balance factor (sometimes called the Bose
    factor)

    Parameters
    ----------
    None

    Returns
    -------
    dbf : ndarray
        The detailed balance factor (temperature correction)

    """
    # 1 - exp(-E / (kB * T)); energy transfer is the 4th column of Q.
    energy = self.Q[:, 3]
    return 1. - np.exp(-energy / BOLTZMANN_IN_MEV_K / self.temp)
neutronpy/neutronpy
[ 11, 4, 11, 7, 1407430759 ]
def position(self, bounds=None, background=None, hkle=True):
    r"""Returns the position of a peak within the given bounds

    Parameters
    ----------
    bounds : bool, optional
        A boolean expression representing the bounds inside which the
        calculation will be performed

    background : float or dict, optional
        Default: None

    hkle : bool, optional
        If True, integrates only over h, k, l, e dimensions, otherwise
        integrates over all dimensions in :py:attr:`.Data.data`

    Returns
    -------
    result : tup
        The result is a tuple with position in each dimension of Q,
        (h, k, l, e)

    """
    result = ()
    # First moment along each axis: integrate key * (I - background)
    # over every axis, normalized by the total integrated intensity.
    for key in self.get_keys(hkle):
        _result = 0
        for key_integrate in self.get_keys(hkle):
            _result += np.trapz(self.data[key][self.get_bounds(bounds)] *
                                (self.intensity[self.get_bounds(bounds)] -
                                 self.estimate_background(background)),
                                self.data[key_integrate][self.get_bounds(bounds)]) / self.integrate(bounds, background)
        result += (np.squeeze(_result),)
    if hkle:
        return result
    else:
        return dict((key, value) for key, value in zip(self.get_keys(hkle), result))
neutronpy/neutronpy
[ 11, 4, 11, 7, 1407430759 ]
def scattering_function(self, material, ei):
    r"""Returns the neutron scattering function, i.e. the detector counts
    scaled by :math:`4 \pi / \sigma_{\mathrm{tot}} * k_i/k_f`.

    Parameters
    ----------
    material : object
        Definition of the material given by the :py:class:`.Material`
        class

    ei : float
        Incident energy in meV

    Returns
    -------
    counts : ndarray
        The detector counts scaled by the total scattering cross section
        and ki/kf

    """
    ki = Energy(energy=ei).wavevector
    kf = Energy(energy=ei - self.e).wavevector
    # detector * (4 pi / sigma_tot) * ki / kf
    scale = 4 * np.pi / material.total_scattering_cross_section
    return scale * ki / kf * self.detector
neutronpy/neutronpy
[ 11, 4, 11, 7, 1407430759 ]
def estimate_background(self, bg_params):
    r"""Estimate the background according to ``type`` specified.

    Parameters
    ----------
    bg_params : None, float, or dict
        A plain number is used as the background directly. A dict has
        keys 'type' and 'value'. Types are

            * 'constant' : background is the constant given by 'value'
            * 'percent' : background is estimated by the bottom x%, where
              x is value
            * 'minimum' : background is estimated as the detector counts

    Returns
    -------
    background : float or ndarray
        Value determined to be the background. Will return ndarray only
        if `'type'` is `'constant'` and `'value'` is an ndarray

    """
    # Idiom fix: was isinstance(bg_params, type(None)).
    if bg_params is None:
        return 0
    if isinstance(bg_params, numbers.Number):
        return bg_params
    # .get avoids a KeyError on a dict without 'type'; unknown or
    # missing types fall through to 0, matching the original's
    # catch-all else branch.
    bg_type = bg_params.get('type')
    if bg_type == 'constant':
        return bg_params['value']
    if bg_type == 'percent':
        inten = self.intensity[self.intensity >= 0.]
        Npts = int(inten.size * (bg_params['value'] / 100.))
        min_vals = inten[np.argsort(inten)[:Npts]]
        return np.average(min_vals)
    if bg_type == 'minimum':
        return min(self.intensity)
    return 0
neutronpy/neutronpy
[ 11, 4, 11, 7, 1407430759 ]
def setUp(self):
    """Prepare a MongoBackend whose MongoClient is patched out, so no real
    database connection is ever opened during a test."""
    super().setUp()
    patcher = patch('common.djangoapps.track.backends.mongodb.MongoClient')
    self.mongo_patcher = patcher
    patcher.start()
    # Undo the patch automatically when the test finishes.
    self.addCleanup(patcher.stop)
    self.backend = MongoBackend()
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def first_argument(call):
    """Return the first positional argument of a mock call tuple.

    A mock ``call`` unpacks as ``(name, args, kwargs)``; this extracts
    ``args[0]``.
    """
    _name, positional_args, _kwargs = call
    return positional_args[0]
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def update_hash(hasher, obj):
    """
    Update a `hashlib` hasher with a nested object.

    To properly cache nested structures, we need to compute a hash from the
    entire structure, canonicalizing at every level.

    `hasher`'s `.update()` method is called a number of times, touching all
    of `obj` in the process.  Only primitive JSON-safe types are supported.
    """
    # Feed the type first so that e.g. 1 and "1" (or [] and ()) hash
    # differently even when their reprs/elements would otherwise collide.
    # latin-1 matches the bytes six.b() produced, so digests are unchanged
    # after dropping the six dependency.
    hasher.update(str(type(obj)).encode("latin-1"))
    if isinstance(obj, (tuple, list)):
        for e in obj:
            update_hash(hasher, e)
    elif isinstance(obj, dict):
        # Sort keys so dicts with identical contents hash identically
        # regardless of insertion order.
        for k in sorted(obj):
            update_hash(hasher, k)
            update_hash(hasher, obj[k])
    else:
        hasher.update(repr(obj).encode("latin-1"))
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def safe_exec( code, globals_dict, random_seed=None, python_path=None, extra_files=None, cache=None, limit_overrides_context=None, slug=None, unsafely=False,
eduNEXT/edx-platform
[ 5, 3, 5, 6, 1390926698 ]
def forwards(self, orm):
zuck/prometeo-erp
[ 11, 10, 11, 3, 1426302554 ]
def backwards(self, orm):
zuck/prometeo-erp
[ 11, 10, 11, 3, 1426302554 ]
def setUp(self):
    """Create the GCS bucket the example DAGs expect before each test."""
    super().setUp()
    self.create_gcs_bucket(GCP_BUCKET_NAME, location="europe-north1")
airbnb/airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def test_run_example_dag_memorystore_redis(self):
    """Smoke-test the Cloud Memorystore Redis example DAG end to end."""
    self.run_dag('gcp_cloud_memorystore_redis', CLOUD_DAG_FOLDER)
airbnb/airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def test_run_example_dag_memorystore_memcached(self):
    """Smoke-test the Cloud Memorystore Memcached example DAG end to end."""
    self.run_dag('gcp_cloud_memorystore_memcached', CLOUD_DAG_FOLDER)
airbnb/airflow
[ 29418, 12032, 29418, 869, 1428948298 ]
def __init__(self):
    """Delegate straight to the base controller's initializer."""
    super(FlavorManageController, self).__init__()
ntt-sic/nova
[ 1, 2, 1, 1, 1382427064 ]
def _delete(self, req, id):
    """Delete the flavor identified by the given flavor id.

    Returns HTTP 202 on success; raises HTTPNotFound when no such
    flavor exists.
    """
    context = req.environ['nova.context']
    authorize(context)

    try:
        # read_deleted="no" so an already-deleted flavor 404s instead of
        # being looked up (and destroyed) a second time.
        flavor = flavors.get_flavor_by_flavor_id(
            id, ctxt=context, read_deleted="no")
    except exception.NotFound as e:
        raise webob.exc.HTTPNotFound(explanation=e.format_message())

    # NOTE: flavors.destroy keys on the flavor *name*, not its id.
    flavors.destroy(flavor['name'])

    return webob.Response(status_int=202)
ntt-sic/nova
[ 1, 2, 1, 1, 1382427064 ]
def _create(self, req, body):
    """Create a new flavor from the request body.

    Returns the flavor view on success; raises HTTPBadRequest for a
    malformed body or invalid values, HTTPConflict when a flavor with
    the same name or id already exists.
    """
    context = req.environ['nova.context']
    authorize(context)

    if not self.is_valid_body(body, 'flavor'):
        msg = _("Invalid request body")
        raise webob.exc.HTTPBadRequest(explanation=msg)

    vals = body['flavor']
    name = vals.get('name')
    flavorid = vals.get('id')
    memory = vals.get('ram')
    vcpus = vals.get('vcpus')
    root_gb = vals.get('disk')
    # Optional extension attributes with API-defined defaults.
    ephemeral_gb = vals.get('OS-FLV-EXT-DATA:ephemeral', 0)
    swap = vals.get('swap', 0)
    rxtx_factor = vals.get('rxtx_factor', 1.0)
    is_public = vals.get('os-flavor-access:is_public', True)

    try:
        flavor = flavors.create(name, memory, vcpus, root_gb,
                                ephemeral_gb=ephemeral_gb,
                                flavorid=flavorid, swap=swap,
                                rxtx_factor=rxtx_factor,
                                is_public=is_public)
        if not flavor['is_public']:
            # Private flavors are only visible to the creating project.
            flavors.add_flavor_access(flavor['flavorid'],
                                      context.project_id, context)
        req.cache_db_flavor(flavor)
    except (exception.InstanceTypeExists,
            exception.InstanceTypeIdExists) as err:
        raise webob.exc.HTTPConflict(explanation=err.format_message())
    except exception.InvalidInput as exc:
        raise webob.exc.HTTPBadRequest(explanation=exc.format_message())

    return self._view_builder.show(req, flavor)
ntt-sic/nova
[ 1, 2, 1, 1, 1382427064 ]
def __init__(self, **argd):
    """Initialise the mixin and an empty queue of scheduled events."""
    super(SchedulingComponentMixin, self).__init__(**argd)
    # Heap (managed via heapq elsewhere) of scheduled events; the first
    # tuple element is the wake-up time (see pause()).
    self.eventQueue = []
sparkslabs/kamaelia_
[ 13, 3, 13, 2, 1348148442 ]
def scheduleRel(self, message, delay, priority=1):
    """
    Schedule an event to wake the component and send a message to the
    "event" inbox after a delay.

    Returns whatever scheduleAbs returns — presumably the event handle
    that can later be passed to cancelEvent (TODO confirm).
    """
    return self.scheduleAbs(message, time.time() + delay, priority)
sparkslabs/kamaelia_
[ 13, 3, 13, 2, 1348148442 ]
def cancelEvent(self, event):
    """
    Remove a scheduled event from the scheduler

    Raises ValueError (from list.remove) if the event is not queued.
    """
    self.eventQueue.remove(event)
    # Removing an arbitrary element breaks the heap invariant, so the
    # queue must be re-heapified.
    heapq.heapify(self.eventQueue)
sparkslabs/kamaelia_
[ 13, 3, 13, 2, 1348148442 ]
def pause(self):
    """
    Sleep until there is either an event ready or a message is received
    on an inbox
    """
    if self.eventReady():
        self.signalEvent()
    else:
        if self.eventQueue:
            # Sleep at most until the earliest scheduled event (heap root,
            # first tuple element is its absolute time).
            eventTime = self.eventQueue[0][0]
            super(SchedulingComponentMixin, self).pause(eventTime - time.time())
            # The timed pause may have ended because that event came due.
            if self.eventReady():
                self.signalEvent()
        else:
            # Nothing scheduled: block indefinitely on inbox activity.
            super(SchedulingComponentMixin, self).pause()
sparkslabs/kamaelia_
[ 13, 3, 13, 2, 1348148442 ]
def __init__(self, **argd):
    """Initialise via the scheduling-mixin/component cooperative chain."""
    super(SchedulingComponent, self).__init__(**argd)
sparkslabs/kamaelia_
[ 13, 3, 13, 2, 1348148442 ]
def gemm_int8(n, m, l):
    """Build an autotvm-tunable CUDA schedule for int8 GEMM using dp4a.

    Computes C[i, j] = sum_k A[i, k] * B[j, k] with int8 inputs widened to
    int32 for accumulation, tensorized over the dp4a 4-element dot-product
    intrinsic.

    Parameters
    ----------
    n, m, l : int
        Matrix dimensions: A is (n, l), B is (m, l), C is (n, m).

    Returns
    -------
    s, [A, B, C]
        The TVM schedule and the placeholder/result tensors.
    """
    A = te.placeholder((n, l), name="A", dtype="int8")
    B = te.placeholder((m, l), name="B", dtype="int8")
    k = te.reduce_axis((0, l), name="k")
    # Widen to int32 before the reduction so accumulation cannot overflow.
    C = te.compute(
        (n, m),
        lambda i, j: te.sum(A[i, k].astype("int32") * B[j, k].astype("int32"), axis=k),
        name="C",
    )
    cfg = autotvm.get_config()
    s = te.create_schedule(C.op)
    y, x = C.op.axis

    # Stage operands through shared and local scopes; accumulate C in a
    # local buffer before the final write-back.
    AA = s.cache_read(A, "shared", [C])
    BB = s.cache_read(B, "shared", [C])
    AL = s.cache_read(AA, "local", [C])
    BL = s.cache_read(BB, "local", [C])
    CC = s.cache_write(C, "local")

    k = CC.op.reduce_axis[0]

    # The innermost k tile must be exactly 4 to match the dp4a intrinsic.
    cfg.define_split(
        "tile_k",
        cfg.axis(k),
        num_outputs=3,
        filter=lambda entity: entity.size[2] == 4 and entity.size[0] * 2 >= entity.size[1],
    )

    ko, kt, ki = cfg["tile_k"].apply(s, CC, k)

    s[CC].tensorize(ki, intrin_dp4a)

    block_x = te.thread_axis("blockIdx.x")
    block_y = te.thread_axis("blockIdx.y")
    thread_x = te.thread_axis("threadIdx.x")
    thread_y = te.thread_axis("threadIdx.y")

    def block_size_filter(entity):
        # Prune the tuning space to reasonable thread-block shapes.
        return (
            entity.size[0] * 2 >= entity.size[1] * 2
            and entity.size[1] <= 16
            and entity.size[3] <= 4
        )

    cfg.define_split("tile_y", cfg.axis(y), num_outputs=4, filter=block_size_filter)
    cfg.define_split("tile_x", cfg.axis(x), num_outputs=4, filter=block_size_filter)
    by, tyz, ty, yi = cfg["tile_y"].apply(s, C, y)
    bx, txz, tx, xi = cfg["tile_x"].apply(s, C, x)

    # Map the output tiles onto the CUDA block / vthread / thread hierarchy.
    s[C].bind(by, block_y)
    s[C].bind(bx, block_x)
    s[C].bind(tyz, te.thread_axis("vthread"))
    s[C].bind(txz, te.thread_axis("vthread"))
    s[C].bind(ty, thread_y)
    s[C].bind(tx, thread_x)
    s[C].reorder(by, bx, tyz, txz, ty, tx, yi, xi)

    s[CC].compute_at(s[C], tx)

    yo, xo = CC.op.axis
    s[CC].reorder(ko, kt, yo, xo, ki)
    s[CC].unroll(kt)

    # 4-wide vectorized loads from shared memory into registers; the
    # double_buffer lets loads overlap with computation.
    for stage in [AL, BL]:
        s[stage].compute_at(s[CC], kt)
        _, xi = s[stage].split(stage.op.axis[1], factor=4)
        s[stage].vectorize(xi)
        s[stage].double_buffer()

    # Tunable row alignment of the shared tiles — presumably to avoid
    # shared-memory bank conflicts (typical use of storage_align).
    cfg.define_knob("storage_align", [16, 48])
    for stage in [AA, BB]:
        s[stage].storage_align(s[stage].op.axis[0], cfg["storage_align"].val, 0)
        s[stage].compute_at(s[CC], ko)

        # Cooperative fetch: the whole thread block loads the shared tile.
        fused = s[stage].fuse(*s[stage].op.axis)
        ty, tx = s[stage].split(fused, nparts=cfg["tile_y"].size[2])
        tx, xi = s[stage].split(tx, nparts=cfg["tile_x"].size[2])
        _, xi = s[stage].split(xi, factor=16)
        s[stage].bind(ty, thread_y)
        s[stage].bind(tx, thread_x)
        s[stage].vectorize(xi)

    cfg.define_knob("auto_unroll_max_step", [512, 1500])
    s[C].pragma(by, "auto_unroll_max_step", cfg["auto_unroll_max_step"].val)
    s[C].pragma(by, "unroll_explicit", False)

    # 2*n*m*l flops (multiply + add) for the tuner's GFLOPS reporting.
    cfg.add_flop(n * m * l * 2)
    return s, [A, B, C]
dmlc/tvm
[ 9142, 2938, 9142, 595, 1476310828 ]
def init(self, cwd):
    """Set the segment text to *cwd* with the home directory abbreviated.

    The original used ``cwd.replace(home, '~')``, which rewrites *every*
    occurrence of the home path anywhere inside ``cwd`` (e.g. a path that
    merely contains the home string in the middle). Only the leading
    home-directory component should be abbreviated.
    """
    home = os.path.expanduser('~')
    if cwd == home or cwd.startswith(home + os.sep):
        # Abbreviate only the leading home component.
        self.text = '~' + cwd[len(home):]
    else:
        self.text = cwd
nimiq/promptastic
[ 83, 18, 83, 8, 1411731732 ]
def init(self, cwd):
    """Show the write-only indicator; deactivate the segment when the
    current directory is writable."""
    writable = os.access(cwd, os.W_OK)
    self.text = ' ' + glyphs.WRITE_ONLY + ' '
    if writable:
        # Writable directory: the segment should not be displayed.
        self.active = False
nimiq/promptastic
[ 83, 18, 83, 8, 1411731732 ]
def __init__(self, start_lineno, end_lineno, text):
    """Record a comment block's line span and raw text."""
    # int : The first line number in the block. 1-indexed.
    self.start_lineno = start_lineno
    # int : The last line number. Inclusive!
    self.end_lineno = end_lineno
    # str : The text block including '#' character but not any leading spaces.
    self.text = text
nguy/artview
[ 40, 19, 40, 33, 1411768406 ]
def __repr__(self):
    """Return an eval-style representation: ClassName(start, end, text)."""
    class_name = self.__class__.__name__
    return '{0}({1!r}, {2!r}, {3!r})'.format(
        class_name, self.start_lineno, self.end_lineno, self.text)
nguy/artview
[ 40, 19, 40, 33, 1411768406 ]
def __init__(self, start_lineno, end_lineno):
    """Record the line span of a run of non-comment code."""
    # int : first line of the span, 1-indexed.
    self.start_lineno = start_lineno
    # int : last line of the span, inclusive.
    self.end_lineno = end_lineno
nguy/artview
[ 40, 19, 40, 33, 1411768406 ]
def __repr__(self):
    """Return an eval-style representation: ClassName(start, end)."""
    return '{0}({1!r}, {2!r})'.format(
        self.__class__.__name__, self.start_lineno, self.end_lineno)
nguy/artview
[ 40, 19, 40, 33, 1411768406 ]
def __init__(self):
    """Set up an empty blocker with a sentinel current block."""
    # Start with a dummy (empty non-comment span) so process_token always
    # has a current block to compare against.
    self.current_block = NonComment(0, 0)

    # All of the blocks seen so far.
    self.blocks = []

    # The index mapping lines of code to their associated comment blocks.
    self.index = {}
nguy/artview
[ 40, 19, 40, 33, 1411768406 ]
def process_token(self, kind, string, start, end, line):
    """ Process a single token.

    Tokens of the same kind as the current block extend it; a kind switch
    starts a new comment or non-comment block.
    """
    token_is_comment = (kind == tokenize.COMMENT)
    if token_is_comment == self.current_block.is_comment:
        # Same kind as the block being built: just extend it.
        self.current_block.add(string, start, end, line)
    elif token_is_comment:
        # Code block ended, a comment begins.
        self.new_comment(string, start, end, line)
    else:
        # Comment block ended, code begins.
        self.new_noncomment(start[0], end[0])
nguy/artview
[ 40, 19, 40, 33, 1411768406 ]
def new_comment(self, string, start, end, line): """ Possibly add a new comment.
nguy/artview
[ 40, 19, 40, 33, 1411768406 ]
def make_index(self):
    """ Make the index mapping lines of actual code to their associated
    prefix comments.
    """
    # Walk consecutive block pairs; each non-comment block is keyed by its
    # first line and mapped to the block immediately preceding it.
    for position in range(1, len(self.blocks)):
        block = self.blocks[position]
        if block.is_comment:
            continue
        self.index[block.start_lineno] = self.blocks[position - 1]
nguy/artview
[ 40, 19, 40, 33, 1411768406 ]