Dataset schema (each record below is a group of three lines):
  function   : string, 11 to 56k characters (flattened source of a single function)
  repo_name  : string, 5 to 60 characters
  features   : sequence (rendered below as a list of integers)
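Each record below pairs a flattened function body with its repository name and an integer feature vector. A minimal sketch of how such three-field rows could be grouped back into structured records follows; the Record class and parse_records helper are hypothetical names introduced only for illustration, and the three-lines-per-record layout is an assumption based on the rows shown here, not part of any original tooling.

from dataclasses import dataclass
from typing import Iterator, List
import ast


@dataclass
class Record:
    function: str        # flattened source code of a single function
    repo_name: str       # e.g. "ask/carrot"
    features: List[int]  # e.g. [196, 34, 196, 12, 1237911887]


def parse_records(lines: List[str]) -> Iterator[Record]:
    # Assumes records are stored as consecutive non-empty lines in
    # (function, repo_name, features) order, as in the dump below.
    rows = [line.strip() for line in lines if line.strip()]
    for i in range(0, len(rows) - 2, 3):
        yield Record(
            function=rows[i],
            repo_name=rows[i + 1],
            features=ast.literal_eval(rows[i + 2]),  # "[ 196, 34, ... ]" -> [196, 34, ...]
        )

Under these assumptions, list(parse_records(open('dump.txt'))) would yield one Record per (function, repo_name, features) triplet; 'dump.txt' is likewise a hypothetical file name.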
def _declare_consumer(self, consumer, nowait=False):
    """Declare consumer so messages can be received from it using
    :meth:`iterconsume`."""
    if consumer.queue not in self._open_consumers:
        # Use the ConsumerSet's consumer by default, but if the
        # child consumer has a callback, honor it.
        callback = consumer.callbacks and \
            consumer._receive_callback or self._receive_callback
        self.backend.declare_consumer(queue=consumer.queue,
                                      no_ack=consumer.no_ack,
                                      nowait=nowait,
                                      callback=callback,
                                      consumer_tag=consumer.consumer_tag)
        self._open_consumers[consumer.queue] = consumer.consumer_tag
ask/carrot
[ 196, 34, 196, 12, 1237911887 ]
def iterconsume(self, limit=None): """Cycle between all consumers in consume mode. See :meth:`Consumer.iterconsume`. """ self.consume() return self.backend.consume(limit=limit)
ask/carrot
[ 196, 34, 196, 12, 1237911887 ]
def flow(self, active): """This method asks the peer to pause or restart the flow of content data. See :meth:`Consumer.flow`. """ self.backend.flow(active)
ask/carrot
[ 196, 34, 196, 12, 1237911887 ]
def cancel(self):
    """Cancel a running :meth:`iterconsume` session."""
    for consumer_tag in self._open_consumers.values():
        try:
            self.backend.cancel(consumer_tag)
        except KeyError:
            pass
    self._open_consumers.clear()
ask/carrot
[ 196, 34, 196, 12, 1237911887 ]
def __init__(self, target='ImageDisplay:*'): self.ds9 = ds9.ds9()
saltastro/pysalt
[ 15, 18, 15, 33, 1366643211 ]
def regions(self, rgnstr): cmd = 'regions %s'
saltastro/pysalt
[ 15, 18, 15, 33, 1366643211 ]
def rotate(self, angle): """Rotate the image""" self.ds9.set('rotate to %f' % angle)
saltastro/pysalt
[ 15, 18, 15, 33, 1366643211 ]
def deleteregions(self): """Delete all regions in the frame""" cmd='regions delete all' self.ds9.set(cmd)
saltastro/pysalt
[ 15, 18, 15, 33, 1366643211 ]
def __init__(self,quillLang): if isinstance(quillLang,QuillLanguage): self.language = quillLang else: raise Exception,'Invalid parameter. Not of type QuillLanguage'
teamtachyon/Quillpad-Server
[ 69, 100, 69, 9, 1468413124 ]
def train(self,uWords,scope=4,splRulesFlag=True): self.language.setKnowledge(self.__buildKeyToCARTMap(uWords,scope,splRulesFlag,"primary"),"primary") self.language.setKnowledge(self.__buildKeyToCARTMap(uWords,scope,splRulesFlag,"predictive"),"predictive") return self.language
teamtachyon/Quillpad-Server
[ 69, 100, 69, 9, 1468413124 ]
def getLanguage(self): return self.language
teamtachyon/Quillpad-Server
[ 69, 100, 69, 9, 1468413124 ]
def store(self,fname=None): if fname == None: fname = self.language.language+'.qil'
teamtachyon/Quillpad-Server
[ 69, 100, 69, 9, 1468413124 ]
def load(self, trainedData): pass
teamtachyon/Quillpad-Server
[ 69, 100, 69, 9, 1468413124 ]
def __buildKeyToCARTMap ( self, uWords,scope=4,splRulesFlag=True,type="predictive" ): contextLen = scope splRules = [] if splRulesFlag == True: splRules = self.language.getSpecialRules(type)
teamtachyon/Quillpad-Server
[ 69, 100, 69, 9, 1468413124 ]
def createTrainingData( self, uWords,scope=4,splRulesType='predictive',fname = None ): contextLen = scope
teamtachyon/Quillpad-Server
[ 69, 100, 69, 9, 1468413124 ]
def drawLines(mode): pass
fos/fos-legacy
[ 15, 5, 15, 3, 1260185641 ]
def reshape (w, h):
fos/fos-legacy
[ 15, 5, 15, 3, 1260185641 ]
def display():
fos/fos-legacy
[ 15, 5, 15, 3, 1260185641 ]
def window(w=800, h=800, title='light', px=100, py=100):
    # glut init
    glut.glutInit(sys.argv)
    glut.glutInitDisplayMode(glut.GLUT_DOUBLE | glut.GLUT_RGB)
    glut.glutInitWindowSize(w, h)
    glut.glutInitWindowPosition(px, py)
    glut.glutCreateWindow(title)
fos/fos-legacy
[ 15, 5, 15, 3, 1260185641 ]
def keyboard(key, x, y): #global day,year,zdist if key == chr(27): sys.exit(0)
fos/fos-legacy
[ 15, 5, 15, 3, 1260185641 ]
def mouse(button, state, x, y): #global day,year
fos/fos-legacy
[ 15, 5, 15, 3, 1260185641 ]
def interaction(disp=display, resh=reshape, key=keyboard):
    global mi
    mi = mouse.MouseInteractor(.01, 0.02)
    mi.registerCallbacks()
    glut.glutDisplayFunc(disp)
    glut.glutReshapeFunc(resh)
    glut.glutKeyboardFunc(key)
    # glut.glutMouseFunc(mous)
fos/fos-legacy
[ 15, 5, 15, 3, 1260185641 ]
def test_bug_01():
    """
    if first row of csv is shorter than the rest of the rows, the csv
    will be truncated by first row. This is a bug

    "a,d,e,f" <- this will be 1
    '1',2,3,4 <- 4
    '2',3,4,5
    'b'       <- give '' for missing cells
    """
    r = p.Reader(os.path.join("tests", "fixtures", "bug_01.csv"))
    assert len(r.row[0]) == 4
    # test "" is append for empty cells
    assert r[0, 1] == ""
    assert r[3, 1] == ""
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_06(): import logging logger = logging.getLogger("test") logger.setLevel(logging.DEBUG) ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) logger.addHandler(ch) output = StringIO() book = p.Book({"hoja1": [["datos", "de", "prueba"], [1, 2, 3]]}) book.save_to_memory("csv", output) logger.debug(output.getvalue())
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_10(): thedict = OrderedDict() thedict.update({"Column 1": [1, 2, 3]}) thedict.update({"Column 2": [1, 2, 3]}) thedict.update({"Column 3": [1, 2, 3]}) p.save_as(adict=thedict, dest_file_name="issue10.xls") newdict = p.get_dict(file_name="issue10.xls") assert isinstance(newdict, OrderedDict) is True eq_(thedict, newdict) os.unlink("issue10.xls")
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_29_nominablesheet():
    a = [
        ["date", "number", "misc", "long number"],
        # error case
        ["2016-03-31 10:59", "0123", "XS360_EU", "04566651561653122"],
        # python types
        [datetime(2016, 4, 15, 17, 52, 11), 123, False, 456193284757],
    ]
    s = p.get_sheet(array=a)
    s.name_columns_by_row(0)
    content = dedent(
        """
        pyexcel_sheet1:
        +------------------+--------+----------+-------------------+
        | date             | number | misc     | long number       |
        +==================+========+==========+===================+
        | 2016-03-31 10:59 | 0123   | XS360_EU | 04566651561653122 |
        +------------------+--------+----------+-------------------+
        | 15/04/16         | 123    | false    | 456193284757      |
        +------------------+--------+----------+-------------------+"""
    )
    eq_(str(s), content.strip("\n"))
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_51_normal_dict_in_records(): from pyexcel.plugins.sources.pydata.records import RecordsReader records = [] orderred_dict = {} orderred_dict.update({"Zebra": 10}) orderred_dict.update({"Hippo": 9}) orderred_dict.update({"Monkey": 8}) records.append(orderred_dict) orderred_dict2 = {} orderred_dict2.update({"Zebra": 1}) orderred_dict2.update({"Hippo": 2}) orderred_dict2.update({"Monkey": 3}) records.append(orderred_dict2) records_reader = RecordsReader(records) array = list(records_reader.to_array()) expected = [["Hippo", "Monkey", "Zebra"], [9, 8, 10], [2, 3, 1]] eq_(array, expected)
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_60_chinese_text_in_python_2_stdout(): import sys data = [["这", "是", "中", "文"], ["这", "是", "中", "文"]] sheet = p.Sheet(data) sys.stdout.write(repr(sheet))
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_63_empty_array_crash_texttable_renderer(): sheet = p.Sheet([]) print(sheet)
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_68(): data = [[1]] sheet = p.Sheet(data) stream = sheet.save_to_memory("csv") eq_(stream.read(), "1\r\n") data = {"sheet": [[1]]} book = p.Book(data) stream = book.save_to_memory("csv") eq_(stream.read(), "1\r\n")
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_76(): from pyexcel._compact import StringIO tsv_stream = StringIO() tsv_stream.write("1\t2\t3\t4\n") tsv_stream.write("1\t2\t3\t4\n") tsv_stream.seek(0) sheet = p.get_sheet( file_stream=tsv_stream, file_type="csv", delimiter="\t" ) data = [[1, 2, 3, 4], [1, 2, 3, 4]] eq_(sheet.array, data)
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_83_file_handle_no_generator():
    proc = psutil.Process()
    test_files = [
        os.path.join("tests", "fixtures", "bug_01.csv"),
        os.path.join("tests", "fixtures", "test-single.csvz"),
        os.path.join("tests", "fixtures", "date_field.xls"),
    ]
    for test_file in test_files:
        open_files_l1 = proc.open_files()
        # start with a csv file
        p.get_array(file_name=test_file)
        open_files_l2 = proc.open_files()
        delta = len(open_files_l2) - len(open_files_l1)
        # no open file handle should be left
        assert delta == 0
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_83_xls_file_handle():
    proc = psutil.Process()
    test_file = os.path.join("tests", "fixtures", "date_field.xls")
    open_files_l1 = proc.open_files()
    # start with a csv file
    data = p.iget_array(file_name=test_file)
    open_files_l2 = proc.open_files()
    delta = len(open_files_l2) - len(open_files_l1)
    # interestingly, no open file using xlrd
    assert delta == 0
    # now the file handle get opened when we run through
    # the generator
    list(data)
    open_files_l3 = proc.open_files()
    delta = len(open_files_l3) - len(open_files_l1)
    # still no open file
    assert delta == 0
    p.free_resources()
    open_files_l4 = proc.open_files()
    eq_(open_files_l1, open_files_l4)
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_92_incomplete_records():
    records = [{"a": 1, "b": 2, "c": 3}, {"b": 2}, {"c": 3}]
    sheet = p.get_sheet(records=records)
    content = dedent(
        """
        +---+---+---+
        | a | b | c |
        +---+---+---+
        | 1 | 2 | 3 |
        +---+---+---+
        |   | 2 |   |
        +---+---+---+
        |   |   | 3 |
        +---+---+---+"""
    ).strip("\n")
    eq_(str(sheet.content), content)
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_95_preserve_order_in_iget_orders(): test_data = [["a", "b", "c"], ["1", "2", "3"], ["4", "5", "6"]] records = p.iget_records(array=test_data) result = [] for record in records: for key, value in record.items(): result.append([key, value]) expected = [ ["a", "1"], ["b", "2"], ["c", "3"], ["a", "4"], ["b", "5"], ["c", "6"], ] eq_(result, expected)
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_95_preserve_order_in_get_orders(): test_data = [["a", "b", "c"], ["1", "2", "3"], ["4", "5", "6"]] records = p.get_records(array=test_data) result = [] for record in records: for key, value in record.items(): result.append([key, value]) expected = [ ["a", "1"], ["b", "2"], ["c", "3"], ["a", "4"], ["b", "5"], ["c", "6"], ] eq_(result, expected)
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_125(): book = p.Book() book += p.Sheet([[1]], "A") book += p.Sheet([[2]], "B") eq_(book.sheet_names(), ["A", "B"]) book.sort_sheets(reverse=True) eq_(book.sheet_names(), ["B", "A"])
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_125_using_key(): test_file = "issue_125.xls" book = p.Book() book += p.Sheet([[1]], "A") book += p.Sheet([[2]], "B") book += p.Sheet([[3]], "C") custom_order = {"A": 1, "B": 3, "C": 2} book.sort_sheets(key=lambda x: custom_order[x]) book.save_as(test_file) book2 = p.get_book(file_name=test_file) eq_(book2.sheet_names(), ["A", "C", "B"]) os.unlink(test_file)
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_issue_126_isave_as(): data = [[1]] test_file = "issue_126.xls" test_name = "doyoufindme" p.isave_as(array=data, dest_file_name=test_file, dest_sheet_name=test_name) sheet = p.get_sheet(file_name=test_file) eq_(sheet.name, test_name) os.unlink(test_file)
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def test_pyexcel_issue_140(): TestSheet1 = p.Sheet() TestSheet1[4, 4] = "4x4" TestSheet1[0, 0] = "0,0" expected = [ ["0,0", "", "", "", ""], ["", "", "", "", ""], ["", "", "", "", ""], ["", "", "", "", ""], ["", "", "", "", "4x4"], ] eq_(expected, TestSheet1.to_array())
chfw/pyexcel
[ 1105, 160, 1105, 15, 1408711940 ]
def __init__(self, initial_molecule_count=50): super(StochasticLogisticModel, self).__init__() self._n0 = float(initial_molecule_count) if self._n0 < 0: raise ValueError('Initial molecule count cannot be negative.')
martinjrobins/hobo
[ 171, 29, 171, 320, 1494936607 ]
def _simulate_raw(self, parameters):
    """
    Returns tuple (raw times, population sizes) when reactions occur.
    """
    parameters = np.asarray(parameters)
    if len(parameters) != self.n_parameters():
        raise ValueError('This model should have only 2 parameters.')
    b = parameters[0]
    k = parameters[1]
    if b <= 0:
        raise ValueError('Rate constant must be positive.')

    # Initial time and count
    t = 0
    a = self._n0

    # Run stochastic logistic birth-only algorithm, calculating time until
    # next reaction and increasing population count by 1 at that time
    mol_count = [a]
    time = [t]
    while a < k:
        r = np.random.uniform(0, 1)
        t += np.log(1 / r) / (a * b * (1 - a / k))
        a = a + 1
        time.append(t)
        mol_count.append(a)

    return time, mol_count
martinjrobins/hobo
[ 171, 29, 171, 320, 1494936607 ]
def simulate(self, parameters, times):
    """ See :meth:`pints.ForwardModel.simulate()`. """
    times = np.asarray(times)
    if np.any(times < 0):
        raise ValueError('Negative times are not allowed.')
    if self._n0 == 0:
        return np.zeros(times.shape)

    # run Gillespie
    time, pop_size = self._simulate_raw(parameters)

    # interpolate
    values = self._interpolate_values(time, pop_size, times, parameters)
    return values
martinjrobins/hobo
[ 171, 29, 171, 320, 1494936607 ]
def variance(self, parameters, times): r""" Returns the deterministic variance of infinitely many stochastic simulations. """ raise NotImplementedError
martinjrobins/hobo
[ 171, 29, 171, 320, 1494936607 ]
def __init__(self, sortby=None): self.profiler = None self.sortby = sortby or ('cumtime',)
quantmind/pulsar
[ 1878, 170, 1878, 29, 1295093413 ]
def __exit__(self, exc_type, exc_val, exc_tb): self.profiler.disable() self.write_stats()
quantmind/pulsar
[ 1878, 170, 1878, 29, 1295093413 ]
def test_output_option(tmp_path, httpbin, stdout_isatty): output_filename = tmp_path / 'test_output_option' url = httpbin + '/robots.txt' r = http('--output', str(output_filename), url, env=MockEnvironment(stdout_isatty=stdout_isatty)) assert r == '' expected_body = urlopen(url).read().decode() actual_body = output_filename.read_text(encoding=UTF8) assert actual_body == expected_body
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_quiet(self, httpbin, quiet_flags): env = MockEnvironment( stdin_isatty=True, stdout_isatty=True, devnull=io.BytesIO() ) r = http(*quiet_flags, 'GET', httpbin.url + '/get', env=env) assert env.stdout is env.devnull assert env.stderr is env.devnull assert HTTP_OK in r.devnull assert r == '' assert r.stderr == ''
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_quiet_with_check_status_non_zero_pipe(self, httpbin): r = http( '--quiet', '--check-status', httpbin + '/status/500', tolerate_error_exit_status=True, env=MockEnvironment(stdout_isatty=False) ) assert 'http: warning: HTTP 500' in r.stderr
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_quiet_quiet_with_check_status_non_zero_pipe(self, httpbin): r = http( '--quiet', '--quiet', '--check-status', httpbin + '/status/500', tolerate_error_exit_status=True, env=MockEnvironment(stdout_isatty=False) ) assert 'http: warning: HTTP 500' in r.stderr
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_quiet_with_password_prompt(self, httpbin, quiet_flags): """ Tests whether httpie still prompts for a password when request requires authentication and only username is provided """ env = MockEnvironment( stdin_isatty=True, stdout_isatty=True, devnull=io.BytesIO() ) r = http( *quiet_flags, '--auth', 'user', 'GET', httpbin.url + '/basic-auth/user/password', env=env ) assert env.stdout is env.devnull assert env.stderr is env.devnull assert HTTP_OK in r.devnull assert r == '' assert r.stderr == ''
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_quiet_with_explicit_output_options(self, httpbin, quiet_flags, output_options): env = MockEnvironment(stdin_isatty=True, stdout_isatty=True) r = http(*quiet_flags, output_options, httpbin.url + '/get', env=env) assert env.stdout is env.devnull assert env.stderr is env.devnull assert r == '' assert r.stderr == ''
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_quiet_with_output_redirection(self, tmp_path, httpbin, quiet_flags, with_download):
    url = httpbin + '/robots.txt'
    output_path = Path('output.txt')
    env = MockEnvironment()
    orig_cwd = os.getcwd()
    output = requests.get(url).text
    extra_args = ['--download'] if with_download else []
    os.chdir(tmp_path)
    try:
        assert os.listdir('.') == []
        r = http(
            *quiet_flags,
            '--output', str(output_path),
            *extra_args,
            url,
            env=env
        )
        assert os.listdir('.') == [str(output_path)]
        assert r == ''
        assert r.stderr == ''
        assert env.stderr is env.devnull
        if with_download:
            assert env.stdout is env.devnull
        else:
            assert env.stdout is not env.devnull  # --output swaps stdout.
        assert output_path.read_text(encoding=UTF8) == output
    finally:
        os.chdir(orig_cwd)
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_verbose(self, httpbin): r = http('--verbose', 'GET', httpbin.url + '/get', 'test-header:__test__') assert HTTP_OK in r assert r.count('__test__') == 2
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_verbose_form(self, httpbin): # https://github.com/httpie/httpie/issues/53 r = http('--verbose', '--form', 'POST', httpbin.url + '/post', 'A=B', 'C=D') assert HTTP_OK in r assert 'A=B&C=D' in r
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_verbose_implies_all(self, httpbin): r = http('--verbose', '--follow', httpbin + '/redirect/1') assert 'GET /redirect/1 HTTP/1.1' in r assert 'HTTP/1.1 302 FOUND' in r assert 'GET /get HTTP/1.1' in r assert HTTP_OK in r
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_get_lexer(self, mime, explicit_json, body, expected_lexer_name): lexer = get_lexer(mime, body=body, explicit_json=explicit_json) assert lexer is not None assert lexer.name == expected_lexer_name
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_pretty_enabled_by_default(self, httpbin): env = MockEnvironment(colors=256) r = http('GET', httpbin.url + '/get', env=env) assert COLOR in r
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_force_pretty(self, httpbin): env = MockEnvironment(stdout_isatty=False, colors=256) r = http('--pretty=all', 'GET', httpbin.url + '/get', env=env) assert COLOR in r
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_subtype_based_pygments_lexer_match(self, httpbin): """Test that media subtype is used if type/subtype doesn't match any lexer. """ env = MockEnvironment(colors=256) r = http('--print=B', '--pretty=all', httpbin.url + '/post', 'Content-Type:text/foo+json', 'a=b', env=env) assert COLOR in r
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_format_option(self, httpbin): env = MockEnvironment(colors=256) r = http('--print=B', '--pretty=format', 'GET', httpbin.url + '/get', 'a=b', env=env) # Tests that the JSON data is formatted. assert r.strip().count('\n') == 2 assert COLOR not in r
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def _validate_crlf(self, msg):
    lines = iter(msg.splitlines(True))
    for header in lines:
        if header == CRLF:
            break
        assert header.endswith(CRLF), repr(header)
    else:
        assert 0, f'CRLF between headers and body not found in {msg!r}'
    body = ''.join(lines)
    assert CRLF not in body
    return body
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_CRLF_ugly_response(self, httpbin): r = http('--pretty=none', 'GET', httpbin.url + '/get') self._validate_crlf(r)
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_CRLF_ugly_request(self, httpbin): r = http('--pretty=none', '--print=HB', 'GET', httpbin.url + '/get') self._validate_crlf(r)
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_header_formatting_options(self):
    def get_headers(sort):
        return http(
            '--offline', '--print=H',
            '--format-options', 'headers.sort:' + sort,
            'example.org', 'ZZZ:foo', 'XXX:foo',
        )

    r_sorted = get_headers('true')
    r_unsorted = get_headers('false')
    assert r_sorted != r_unsorted
    assert f'XXX: foo{CRLF}ZZZ: foo' in r_sorted
    assert f'ZZZ: foo{CRLF}XXX: foo' in r_unsorted
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_json_formatting_options(self, options: str, expected_json: str): r = http( '--offline', '--print=B', '--format-options', options, 'example.org', 'b:=0', 'a:=0', ) assert expected_json in r
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_parse_format_options(self, defaults, options_string, expected): actual = parse_format_options(s=options_string, defaults=defaults) assert expected == actual
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_parse_format_options_errors(self, options_string, expected_error): defaults = { 'foo': { 'bar': 1 } } with pytest.raises(argparse.ArgumentTypeError, match=expected_error): parse_format_options(s=options_string, defaults=defaults)
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_format_options_accumulation(self, args, expected_format_options): parsed_args = parser.parse_args( args=[*args, 'example.org'], env=MockEnvironment(), ) assert parsed_args.format_options == expected_format_options
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def test_response_mime_overwrite(): responses.add( method=responses.GET, url=DUMMY_URL, body=XML_DATA_RAW, content_type='text/plain', ) r = http( '--offline', '--raw', XML_DATA_RAW, '--response-mime=application/xml', DUMMY_URL ) assert XML_DATA_RAW in r # not affecting request bodies r = http('--response-mime=application/xml', DUMMY_URL) assert XML_DATA_FORMATTED in r
jakubroztocil/httpie
[ 26714, 3750, 26714, 138, 1330173553 ]
def detect(img): # start time start_time = cv2.getTickCount() # Gaussian Filter to remove noise img = cv2.medianBlur(img,5) gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) # print img.shape = (200,350,3) rows,cols,channels = img.shape
isarlab-department-engineering/ros_dt_lane_follower
[ 11, 8, 11, 2, 1491471214 ]
def lane_detection(): pub = rospy.Publisher('lane_detection', Int32, queue_size=10) #ros-lane-detection rospy.init_node('lane-detection',anonymous=True) camera = PiCamera() # Raspberry Pi Camera camera.resolution = (350,200) camera.framerate = 30 #50 camera.contrast = 40 #30 camera.saturation = 100 #20 camera.brightness = 30 #40 camera.sharpness = 0 camera.start_preview() time.sleep(1) rawCapture = PiRGBArray(camera)
isarlab-department-engineering/ros_dt_lane_follower
[ 11, 8, 11, 2, 1491471214 ]
def __init__(self, source: str, filename: PathLike) -> None: super().__init__() self._runner = CodeRunner(source, filename, [])
bokeh/bokeh
[ 17326, 4066, 17326, 698, 1332776401 ]
def _monkeypatch(self):
    def _pass(*args, **kw):
        pass

    def _add_root(obj, *args, **kw):
        curdoc().add_root(obj)

    def _curdoc(*args, **kw):
        return curdoc()

    # these functions are transitively imported from io into plotting,
    # so we have to patch them all. Assumption is that no other patching
    # has occurred, i.e. we can just save the funcs being patched once,
    # from io, and use those as the originals to replace everywhere
    import bokeh.io as io  # lgtm [py/import-and-import-from]
    import bokeh.plotting as p
    mods = [io, p]

    old_io = {}
    for f in self._output_funcs + self._io_funcs:
        old_io[f] = getattr(io, f)

    for mod in mods:
        for f in self._output_funcs:
            setattr(mod, f, _pass)
        for f in self._io_funcs:
            setattr(mod, f, _add_root)

    import bokeh.document as d
    old_doc = d.Document
    d.Document = _curdoc

    return old_io, old_doc
bokeh/bokeh
[ 17326, 4066, 17326, 698, 1332776401 ]
def failed(self) -> bool: return self._runner.failed
bokeh/bokeh
[ 17326, 4066, 17326, 698, 1332776401 ]
def error(self) -> str | None: return self._runner.error
bokeh/bokeh
[ 17326, 4066, 17326, 698, 1332776401 ]
def error_detail(self) -> str | None: return self._runner.error_detail
bokeh/bokeh
[ 17326, 4066, 17326, 698, 1332776401 ]
def doc(self) -> str | None: return self._runner.doc
bokeh/bokeh
[ 17326, 4066, 17326, 698, 1332776401 ]
def extractSanguniangWordpressCom(item):
    '''
    Parser for 'sanguniang.wordpress.com'
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or "preview" in item['title'].lower():
        return None

    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]

    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp,
                                               frag=frag, postfix=postfix,
                                               tl_type=tl_type)

    return False
fake-name/ReadableWebProxy
[ 191, 16, 191, 3, 1437712243 ]
def send_messages(self, email_messages): for email in email_messages: log_email(email)
bmun/huxley
[ 32, 45, 32, 62, 1339386785 ]
def send_messages(self, email_messages):
    with transaction.atomic():
        for email in email_messages:
            log_email(email)
            try:
                return super(LoggingEmailBackend, self).send_messages([email])
            except SMTPException:
                logger = logging.getLogger('huxley.api')
                exc_type, exc_value, exc_traceback = sys.exc_info()
                exc_traceback = ''.join(traceback.format_exception(exc_type, exc_value, exc_traceback))
                log = json.dumps({
                    'message': exc_traceback,
                    'uri': ', '.join(email.to),
                    'status_code': 500,
                    'username': ''})
                logger.exception(log)
bmun/huxley
[ 32, 45, 32, 62, 1339386785 ]
def to_tree(cls, spec_coord, ctx): node = {} if isinstance(spec_coord, SpectralCoord): node['value'] = spec_coord.value node['unit'] = spec_coord.unit if spec_coord.observer is not None: node['observer'] = spec_coord.observer if spec_coord.target is not None: node['target'] = spec_coord.target return node raise TypeError(f"'{spec_coord}' is not a valid SpectralCoord")
astropy/astropy
[ 3699, 1575, 3699, 1256, 1311212029 ]
def rdzipn2targets(infile):
    """Read rdzipn infile and return target and truth tables
    """
    ra, dec, z, itype, priority, numobs = io.read_rdzipn(infile)
    n = len(ra)

    #- Martin's itype is 1 to n, while Bob's fiberassign is 0 to n-1
    itype -= 1
    assert np.min(itype >= 0)

    #- rdzipn has float32 ra, dec, but it should be float64
    ra = ra.astype('float64') % 360  #- enforce 0 <= ra < 360
    dec = dec.astype('float64')

    #- Hardcoded in rdzipn format
    # 0 : 'QSO',      #- QSO-LyA
    # 1 : 'QSO',      #- QSO-Tracer
    # 2 : 'LRG',      #- LRG
    # 3 : 'ELG',      #- ELG
    # 4 : 'STAR',     #- QSO-Fake
    # 5 : 'UNKNOWN',  #- LRG-Fake
    # 6 : 'STAR',     #- StdStar
    # 7 : 'SKY',      #- Sky
desihub/fiberassign
[ 9, 8, 9, 38, 1425078804 ]
def __init__(self, protocol: Protocol) -> None: ''' Configure a Receiver with a specific Bokeh protocol. Args: protocol (Protocol) : A Bokeh protocol object to use to assemble collected message fragments. ''' self._protocol = protocol self._current_consumer = self._HEADER # type: ignore[assignment] # https://github.com/python/mypy/issues/2427 self._message = None self._partial = None self._buf_header = None
bokeh/bokeh
[ 17326, 4066, 17326, 698, 1332776401 ]
def _HEADER(self, fragment: Fragment) -> None: self._message = None self._partial = None self._fragments = [self._assume_text(fragment)] self._current_consumer = self._METADATA # type: ignore[assignment] # https://github.com/python/mypy/issues/2427
bokeh/bokeh
[ 17326, 4066, 17326, 698, 1332776401 ]
def _CONTENT(self, fragment: Fragment) -> None: content = self._assume_text(fragment) self._fragments.append(content) header_json, metadata_json, content_json = [self._assume_text(x) for x in self._fragments[:3]] self._partial = self._protocol.assemble(header_json, metadata_json, content_json) self._check_complete()
bokeh/bokeh
[ 17326, 4066, 17326, 698, 1332776401 ]
def _BUFFER_PAYLOAD(self, fragment: Fragment) -> None: payload = self._assume_binary(fragment) if self._buf_header is None: raise ValidationError("Consuming a buffer payload, but current buffer header is None") header = BufferHeader(id=self._buf_header["id"]) cast(Message[Any], self._partial).assemble_buffer(header, payload) self._check_complete()
bokeh/bokeh
[ 17326, 4066, 17326, 698, 1332776401 ]
def _assume_text(self, fragment: Fragment) -> str: if not isinstance(fragment, str): raise ValidationError(f"expected text fragment but received binary fragment for {self._current_consumer.__name__}") return fragment
bokeh/bokeh
[ 17326, 4066, 17326, 698, 1332776401 ]
def setUpClass(cls): super().setUpClass() cls.factory = RequestFactory() cls.domain = 'api-test' cls.project = Domain.get_or_create_with_name(cls.domain, is_active=True) cls.username = 'alice@example.com' cls.password = '***' cls.user = WebUser.create(cls.domain, cls.username, cls.password, None, None) cls.api_key, _ = HQApiKey.objects.get_or_create(user=WebUser.get_django_user(cls.user)) cls.domain_api_key, _ = HQApiKey.objects.get_or_create(user=WebUser.get_django_user(cls.user), name='domain-scoped', domain=cls.domain)
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def tearDownClass(cls): cls.project.delete() super().tearDownClass()
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def _get_request_with_basic_auth(self, domain=None): return self._get_request( domain, HTTP_AUTHORIZATION=self._construct_basic_auth_header(self.username, self.password) )
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def _construct_basic_auth_header(self, username, password): # https://stackoverflow.com/q/5495452/8207 encoded_auth = base64.b64encode(bytes(f'{username}:{password}', 'utf8')).decode('utf8') return f'Basic {encoded_auth}'
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def _get_domain_path(self): return f'/a/{self.domain}/'
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def assertAuthenticationFail(self, auth_instance, request):
    result = auth_instance.is_authenticated(request)
    # currently auth classes return a 401/403 response in some scenarios
    # this should likely be changed to always return False
    # more discussion here:
    # https://github.com/dimagi/commcare-hq/pull/28201#discussion_r461082885
    if isinstance(result, HttpResponse):
        self.assertIn(result.status_code, (401, 403))
    else:
        self.assertFalse(result)
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def test_login_no_auth(self): self.assertAuthenticationFail(LoginAuthentication(), self._get_request())
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def test_auth_type_basic(self): self.assertAuthenticationSuccess(LoginAuthentication(), self._get_request_with_basic_auth())
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def setUpClass(cls): super().setUpClass() cls.domain2 = 'api-test-other' cls.project2 = Domain.get_or_create_with_name(cls.domain2, is_active=True) cls.user.add_domain_membership(cls.domain2, is_admin=True) cls.user.save()
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def tearDownClass(cls): cls.project2.delete() super().tearDownClass()
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def test_login_no_auth_with_domain(self): self.assertAuthenticationFail(LoginAndDomainAuthentication(), self._get_request(domain=self.domain))
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def test_login_with_domain_key(self): self.assertAuthenticationSuccess( LoginAndDomainAuthentication(), self._get_request( self.domain, HTTP_AUTHORIZATION=self._construct_api_auth_header( self.username, self.domain_api_key ) ) )
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]