repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
sequence
docstring
stringlengths
3
17.3k
docstring_tokens
sequence
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
basho/riak-python-client
riak/transports/http/transport.py
HttpTransport.create_search_index
def create_search_index(self, index, schema=None, n_val=None, timeout=None): """ Create a Solr search index for Yokozuna. :param index: a name of a yz index :type index: string :param schema: XML of Solr schema :type schema: string :param n_val: N value of the write :type n_val: int :param timeout: optional timeout (in ms) :type timeout: integer, None :rtype boolean """ if not self.yz_wm_index: raise NotImplementedError("Search 2.0 administration is not " "supported for this version") url = self.search_index_path(index) headers = {'Content-Type': 'application/json'} content_dict = dict() if schema: content_dict['schema'] = schema if n_val: content_dict['n_val'] = n_val if timeout: content_dict['timeout'] = timeout content = json.dumps(content_dict) # Run the request... status, _, _ = self._request('PUT', url, headers, content) if status != 204: raise RiakError('Error setting Search 2.0 index.') return True
python
def create_search_index(self, index, schema=None, n_val=None, timeout=None):
    """
    Create a Solr search index for Yokozuna.

    :param index: a name of a yz index
    :type index: string
    :param schema: XML of Solr schema
    :type schema: string
    :param n_val: N value of the write
    :type n_val: int
    :param timeout: optional timeout (in ms)
    :type timeout: integer, None
    :rtype boolean
    """
    if not self.yz_wm_index:
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    # Only ship the properties the caller actually supplied.
    props = {}
    if schema:
        props['schema'] = schema
    if n_val:
        props['n_val'] = n_val
    if timeout:
        props['timeout'] = timeout
    # Run the request...
    status, _, _ = self._request('PUT',
                                 self.search_index_path(index),
                                 {'Content-Type': 'application/json'},
                                 json.dumps(props))
    if status != 204:
        raise RiakError('Error setting Search 2.0 index.')
    return True
[ "def", "create_search_index", "(", "self", ",", "index", ",", "schema", "=", "None", ",", "n_val", "=", "None", ",", "timeout", "=", "None", ")", ":", "if", "not", "self", ".", "yz_wm_index", ":", "raise", "NotImplementedError", "(", "\"Search 2.0 administration is not \"", "\"supported for this version\"", ")", "url", "=", "self", ".", "search_index_path", "(", "index", ")", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", "}", "content_dict", "=", "dict", "(", ")", "if", "schema", ":", "content_dict", "[", "'schema'", "]", "=", "schema", "if", "n_val", ":", "content_dict", "[", "'n_val'", "]", "=", "n_val", "if", "timeout", ":", "content_dict", "[", "'timeout'", "]", "=", "timeout", "content", "=", "json", ".", "dumps", "(", "content_dict", ")", "# Run the request...", "status", ",", "_", ",", "_", "=", "self", ".", "_request", "(", "'PUT'", ",", "url", ",", "headers", ",", "content", ")", "if", "status", "!=", "204", ":", "raise", "RiakError", "(", "'Error setting Search 2.0 index.'", ")", "return", "True" ]
Create a Solr search index for Yokozuna. :param index: a name of a yz index :type index: string :param schema: XML of Solr schema :type schema: string :param n_val: N value of the write :type n_val: int :param timeout: optional timeout (in ms) :type timeout: integer, None :rtype boolean
[ "Create", "a", "Solr", "search", "index", "for", "Yokozuna", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L447-L483
train
basho/riak-python-client
riak/transports/http/transport.py
HttpTransport.list_search_indexes
def list_search_indexes(self): """ Return a list of Solr search indexes from Yokozuna. :rtype list of dicts """ if not self.yz_wm_index: raise NotImplementedError("Search 2.0 administration is not " "supported for this version") url = self.search_index_path() # Run the request... status, headers, body = self._request('GET', url) if status == 200: json_data = json.loads(bytes_to_str(body)) # Return a list of dictionaries return json_data else: raise RiakError('Error getting Search 2.0 index.')
python
def list_search_indexes(self):
    """
    Return a list of Solr search indexes from Yokozuna.

    :rtype list of dicts
    """
    if not self.yz_wm_index:
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    # Run the request...
    status, _, body = self._request('GET', self.search_index_path())
    if status != 200:
        raise RiakError('Error getting Search 2.0 index.')
    # The server responds with a JSON list of index descriptions.
    return json.loads(bytes_to_str(body))
[ "def", "list_search_indexes", "(", "self", ")", ":", "if", "not", "self", ".", "yz_wm_index", ":", "raise", "NotImplementedError", "(", "\"Search 2.0 administration is not \"", "\"supported for this version\"", ")", "url", "=", "self", ".", "search_index_path", "(", ")", "# Run the request...", "status", ",", "headers", ",", "body", "=", "self", ".", "_request", "(", "'GET'", ",", "url", ")", "if", "status", "==", "200", ":", "json_data", "=", "json", ".", "loads", "(", "bytes_to_str", "(", "body", ")", ")", "# Return a list of dictionaries", "return", "json_data", "else", ":", "raise", "RiakError", "(", "'Error getting Search 2.0 index.'", ")" ]
Return a list of Solr search indexes from Yokozuna. :rtype list of dicts
[ "Return", "a", "list", "of", "Solr", "search", "indexes", "from", "Yokozuna", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L508-L528
train
basho/riak-python-client
riak/transports/http/transport.py
HttpTransport.create_search_schema
def create_search_schema(self, schema, content): """ Create a new Solr schema for Yokozuna. :param schema: name of Solr schema :type schema: string :param content: actual defintion of schema (XML) :type content: string :rtype boolean """ if not self.yz_wm_schema: raise NotImplementedError("Search 2.0 administration is not " "supported for this version") url = self.search_schema_path(schema) headers = {'Content-Type': 'application/xml'} # Run the request... status, header, body = self._request('PUT', url, headers, content) if status != 204: raise RiakError('Error creating Search 2.0 schema.') return True
python
def create_search_schema(self, schema, content):
    """
    Create a new Solr schema for Yokozuna.

    :param schema: name of Solr schema
    :type schema: string
    :param content: actual definition of schema (XML)
    :type content: string
    :rtype boolean
    """
    if not self.yz_wm_schema:
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    url = self.search_schema_path(schema)
    headers = {'Content-Type': 'application/xml'}
    # Run the request... the response headers/body are unused, so
    # discard them (consistent with create_search_index).
    status, _, _ = self._request('PUT', url, headers, content)
    if status != 204:
        raise RiakError('Error creating Search 2.0 schema.')
    return True
[ "def", "create_search_schema", "(", "self", ",", "schema", ",", "content", ")", ":", "if", "not", "self", ".", "yz_wm_schema", ":", "raise", "NotImplementedError", "(", "\"Search 2.0 administration is not \"", "\"supported for this version\"", ")", "url", "=", "self", ".", "search_schema_path", "(", "schema", ")", "headers", "=", "{", "'Content-Type'", ":", "'application/xml'", "}", "# Run the request...", "status", ",", "header", ",", "body", "=", "self", ".", "_request", "(", "'PUT'", ",", "url", ",", "headers", ",", "content", ")", "if", "status", "!=", "204", ":", "raise", "RiakError", "(", "'Error creating Search 2.0 schema.'", ")", "return", "True" ]
Create a new Solr schema for Yokozuna. :param schema: name of Solr schema :type schema: string :param content: actual defintion of schema (XML) :type content: string :rtype boolean
[ "Create", "a", "new", "Solr", "schema", "for", "Yokozuna", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L552-L575
train
basho/riak-python-client
riak/transports/http/transport.py
HttpTransport.get_search_schema
def get_search_schema(self, schema): """ Fetch a Solr schema from Yokozuna. :param schema: name of Solr schema :type schema: string :rtype dict """ if not self.yz_wm_schema: raise NotImplementedError("Search 2.0 administration is not " "supported for this version") url = self.search_schema_path(schema) # Run the request... status, _, body = self._request('GET', url) if status == 200: result = {} result['name'] = schema result['content'] = bytes_to_str(body) return result else: raise RiakError('Error getting Search 2.0 schema.')
python
def get_search_schema(self, schema):
    """
    Fetch a Solr schema from Yokozuna.

    :param schema: name of Solr schema
    :type schema: string
    :rtype dict
    """
    if not self.yz_wm_schema:
        raise NotImplementedError("Search 2.0 administration is not "
                                  "supported for this version")
    # Run the request...
    status, _, body = self._request('GET', self.search_schema_path(schema))
    if status != 200:
        raise RiakError('Error getting Search 2.0 schema.')
    return {'name': schema, 'content': bytes_to_str(body)}
[ "def", "get_search_schema", "(", "self", ",", "schema", ")", ":", "if", "not", "self", ".", "yz_wm_schema", ":", "raise", "NotImplementedError", "(", "\"Search 2.0 administration is not \"", "\"supported for this version\"", ")", "url", "=", "self", ".", "search_schema_path", "(", "schema", ")", "# Run the request...", "status", ",", "_", ",", "body", "=", "self", ".", "_request", "(", "'GET'", ",", "url", ")", "if", "status", "==", "200", ":", "result", "=", "{", "}", "result", "[", "'name'", "]", "=", "schema", "result", "[", "'content'", "]", "=", "bytes_to_str", "(", "body", ")", "return", "result", "else", ":", "raise", "RiakError", "(", "'Error getting Search 2.0 schema.'", ")" ]
Fetch a Solr schema from Yokozuna. :param schema: name of Solr schema :type schema: string :rtype dict
[ "Fetch", "a", "Solr", "schema", "from", "Yokozuna", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L577-L600
train
basho/riak-python-client
riak/transports/http/transport.py
HttpTransport.search
def search(self, index, query, **params): """ Performs a search query. """ if index is None: index = 'search' options = {} if 'op' in params: op = params.pop('op') options['q.op'] = op options.update(params) url = self.solr_select_path(index, query, **options) status, headers, data = self._request('GET', url) self.check_http_code(status, [200]) if 'json' in headers['content-type']: results = json.loads(bytes_to_str(data)) return self._normalize_json_search_response(results) elif 'xml' in headers['content-type']: return self._normalize_xml_search_response(data) else: raise ValueError("Could not decode search response")
python
def search(self, index, query, **params):
    """
    Performs a search query.
    """
    if index is None:
        index = 'search'
    options = {}
    if 'op' in params:
        options['q.op'] = params.pop('op')
    options.update(params)
    url = self.solr_select_path(index, query, **options)
    status, headers, data = self._request('GET', url)
    self.check_http_code(status, [200])
    # Dispatch on the response content type.
    content_type = headers['content-type']
    if 'json' in content_type:
        results = json.loads(bytes_to_str(data))
        return self._normalize_json_search_response(results)
    if 'xml' in content_type:
        return self._normalize_xml_search_response(data)
    raise ValueError("Could not decode search response")
[ "def", "search", "(", "self", ",", "index", ",", "query", ",", "*", "*", "params", ")", ":", "if", "index", "is", "None", ":", "index", "=", "'search'", "options", "=", "{", "}", "if", "'op'", "in", "params", ":", "op", "=", "params", ".", "pop", "(", "'op'", ")", "options", "[", "'q.op'", "]", "=", "op", "options", ".", "update", "(", "params", ")", "url", "=", "self", ".", "solr_select_path", "(", "index", ",", "query", ",", "*", "*", "options", ")", "status", ",", "headers", ",", "data", "=", "self", ".", "_request", "(", "'GET'", ",", "url", ")", "self", ".", "check_http_code", "(", "status", ",", "[", "200", "]", ")", "if", "'json'", "in", "headers", "[", "'content-type'", "]", ":", "results", "=", "json", ".", "loads", "(", "bytes_to_str", "(", "data", ")", ")", "return", "self", ".", "_normalize_json_search_response", "(", "results", ")", "elif", "'xml'", "in", "headers", "[", "'content-type'", "]", ":", "return", "self", ".", "_normalize_xml_search_response", "(", "data", ")", "else", ":", "raise", "ValueError", "(", "\"Could not decode search response\"", ")" ]
Performs a search query.
[ "Performs", "a", "search", "query", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L602-L624
train
basho/riak-python-client
riak/transports/http/transport.py
HttpTransport.fulltext_add
def fulltext_add(self, index, docs): """ Adds documents to the search index. """ xml = Document() root = xml.createElement('add') for doc in docs: doc_element = xml.createElement('doc') for key in doc: value = doc[key] field = xml.createElement('field') field.setAttribute("name", key) text = xml.createTextNode(value) field.appendChild(text) doc_element.appendChild(field) root.appendChild(doc_element) xml.appendChild(root) self._request('POST', self.solr_update_path(index), {'Content-Type': 'text/xml'}, xml.toxml().encode('utf-8'))
python
def fulltext_add(self, index, docs):
    """
    Adds documents to the search index.
    """
    # Build a Solr <add> document containing one <doc> per entry,
    # each field rendered as <field name="...">value</field>.
    doc_tree = Document()
    add_node = doc_tree.createElement('add')
    for entry in docs:
        entry_node = doc_tree.createElement('doc')
        for name in entry:
            field_node = doc_tree.createElement('field')
            field_node.setAttribute("name", name)
            field_node.appendChild(doc_tree.createTextNode(entry[name]))
            entry_node.appendChild(field_node)
        add_node.appendChild(entry_node)
    doc_tree.appendChild(add_node)
    self._request('POST', self.solr_update_path(index),
                  {'Content-Type': 'text/xml'},
                  doc_tree.toxml().encode('utf-8'))
[ "def", "fulltext_add", "(", "self", ",", "index", ",", "docs", ")", ":", "xml", "=", "Document", "(", ")", "root", "=", "xml", ".", "createElement", "(", "'add'", ")", "for", "doc", "in", "docs", ":", "doc_element", "=", "xml", ".", "createElement", "(", "'doc'", ")", "for", "key", "in", "doc", ":", "value", "=", "doc", "[", "key", "]", "field", "=", "xml", ".", "createElement", "(", "'field'", ")", "field", ".", "setAttribute", "(", "\"name\"", ",", "key", ")", "text", "=", "xml", ".", "createTextNode", "(", "value", ")", "field", ".", "appendChild", "(", "text", ")", "doc_element", ".", "appendChild", "(", "field", ")", "root", ".", "appendChild", "(", "doc_element", ")", "xml", ".", "appendChild", "(", "root", ")", "self", ".", "_request", "(", "'POST'", ",", "self", ".", "solr_update_path", "(", "index", ")", ",", "{", "'Content-Type'", ":", "'text/xml'", "}", ",", "xml", ".", "toxml", "(", ")", ".", "encode", "(", "'utf-8'", ")", ")" ]
Adds documents to the search index.
[ "Adds", "documents", "to", "the", "search", "index", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L626-L646
train
basho/riak-python-client
riak/transports/http/transport.py
HttpTransport.fulltext_delete
def fulltext_delete(self, index, docs=None, queries=None): """ Removes documents from the full-text index. """ xml = Document() root = xml.createElement('delete') if docs: for doc in docs: doc_element = xml.createElement('id') text = xml.createTextNode(doc) doc_element.appendChild(text) root.appendChild(doc_element) if queries: for query in queries: query_element = xml.createElement('query') text = xml.createTextNode(query) query_element.appendChild(text) root.appendChild(query_element) xml.appendChild(root) self._request('POST', self.solr_update_path(index), {'Content-Type': 'text/xml'}, xml.toxml().encode('utf-8'))
python
def fulltext_delete(self, index, docs=None, queries=None):
    """
    Removes documents from the full-text index.
    """
    # Build a Solr <delete> document: one <id> per document id and
    # one <query> per delete-by-query expression.
    doc_tree = Document()
    delete_node = doc_tree.createElement('delete')
    for doc_id in (docs or []):
        id_node = doc_tree.createElement('id')
        id_node.appendChild(doc_tree.createTextNode(doc_id))
        delete_node.appendChild(id_node)
    for query in (queries or []):
        query_node = doc_tree.createElement('query')
        query_node.appendChild(doc_tree.createTextNode(query))
        delete_node.appendChild(query_node)
    doc_tree.appendChild(delete_node)
    self._request('POST', self.solr_update_path(index),
                  {'Content-Type': 'text/xml'},
                  doc_tree.toxml().encode('utf-8'))
[ "def", "fulltext_delete", "(", "self", ",", "index", ",", "docs", "=", "None", ",", "queries", "=", "None", ")", ":", "xml", "=", "Document", "(", ")", "root", "=", "xml", ".", "createElement", "(", "'delete'", ")", "if", "docs", ":", "for", "doc", "in", "docs", ":", "doc_element", "=", "xml", ".", "createElement", "(", "'id'", ")", "text", "=", "xml", ".", "createTextNode", "(", "doc", ")", "doc_element", ".", "appendChild", "(", "text", ")", "root", ".", "appendChild", "(", "doc_element", ")", "if", "queries", ":", "for", "query", "in", "queries", ":", "query_element", "=", "xml", ".", "createElement", "(", "'query'", ")", "text", "=", "xml", ".", "createTextNode", "(", "query", ")", "query_element", ".", "appendChild", "(", "text", ")", "root", ".", "appendChild", "(", "query_element", ")", "xml", ".", "appendChild", "(", "root", ")", "self", ".", "_request", "(", "'POST'", ",", "self", ".", "solr_update_path", "(", "index", ")", ",", "{", "'Content-Type'", ":", "'text/xml'", "}", ",", "xml", ".", "toxml", "(", ")", ".", "encode", "(", "'utf-8'", ")", ")" ]
Removes documents from the full-text index.
[ "Removes", "documents", "from", "the", "full", "-", "text", "index", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L648-L671
train
basho/riak-python-client
riak/transports/pool.py
Resource.release
def release(self): """ Releases this resource back to the pool it came from. """ if self.errored: self.pool.delete_resource(self) else: self.pool.release(self)
python
def release(self):
    """
    Releases this resource back to the pool it came from.
    """
    # An errored resource must be destroyed rather than reused.
    handler = self.pool.delete_resource if self.errored else self.pool.release
    handler(self)
[ "def", "release", "(", "self", ")", ":", "if", "self", ".", "errored", ":", "self", ".", "pool", ".", "delete_resource", "(", "self", ")", "else", ":", "self", ".", "pool", ".", "release", "(", "self", ")" ]
Releases this resource back to the pool it came from.
[ "Releases", "this", "resource", "back", "to", "the", "pool", "it", "came", "from", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/pool.py#L76-L83
train
basho/riak-python-client
riak/transports/pool.py
Pool.delete_resource
def delete_resource(self, resource): """ Deletes the resource from the pool and destroys the associated resource. Not usually needed by users of the pool, but called internally when BadResource is raised. :param resource: the resource to remove :type resource: Resource """ with self.lock: self.resources.remove(resource) self.destroy_resource(resource.object) del resource
python
def delete_resource(self, resource):
    """
    Deletes the resource from the pool and destroys the associated
    resource. Not usually needed by users of the pool, but called
    internally when BadResource is raised.

    :param resource: the resource to remove
    :type resource: Resource
    """
    # Unlink under the lock; the actual teardown happens outside it.
    with self.lock:
        self.resources.remove(resource)
    self.destroy_resource(resource.object)
    del resource
[ "def", "delete_resource", "(", "self", ",", "resource", ")", ":", "with", "self", ".", "lock", ":", "self", ".", "resources", ".", "remove", "(", "resource", ")", "self", ".", "destroy_resource", "(", "resource", ".", "object", ")", "del", "resource" ]
Deletes the resource from the pool and destroys the associated resource. Not usually needed by users of the pool, but called internally when BadResource is raised. :param resource: the resource to remove :type resource: Resource
[ "Deletes", "the", "resource", "from", "the", "pool", "and", "destroys", "the", "associated", "resource", ".", "Not", "usually", "needed", "by", "users", "of", "the", "pool", "but", "called", "internally", "when", "BadResource", "is", "raised", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/pool.py#L209-L221
train
basho/riak-python-client
riak/codecs/ttb.py
TtbCodec.encode_timeseries_put
def encode_timeseries_put(self, tsobj): ''' Returns an Erlang-TTB encoded tuple with the appropriate data and metadata from a TsObject. :param tsobj: a TsObject :type tsobj: TsObject :rtype: term-to-binary encoded object ''' if tsobj.columns: raise NotImplementedError('columns are not used') if tsobj.rows and isinstance(tsobj.rows, list): req_rows = [] for row in tsobj.rows: req_r = [] for cell in row: req_r.append(self.encode_to_ts_cell(cell)) req_rows.append(tuple(req_r)) req = tsputreq_a, tsobj.table.name, [], req_rows mc = MSG_CODE_TS_TTB_MSG rc = MSG_CODE_TS_TTB_MSG return Msg(mc, encode(req), rc) else: raise RiakError("TsObject requires a list of rows")
python
def encode_timeseries_put(self, tsobj):
    '''
    Returns an Erlang-TTB encoded tuple with the appropriate data and
    metadata from a TsObject.

    :param tsobj: a TsObject
    :type tsobj: TsObject
    :rtype: term-to-binary encoded object
    '''
    if tsobj.columns:
        raise NotImplementedError('columns are not used')

    if not (tsobj.rows and isinstance(tsobj.rows, list)):
        raise RiakError("TsObject requires a list of rows")

    # Each row becomes a tuple of TTB cell encodings.
    req_rows = [tuple(self.encode_to_ts_cell(cell) for cell in row)
                for row in tsobj.rows]
    req = tsputreq_a, tsobj.table.name, [], req_rows
    mc = MSG_CODE_TS_TTB_MSG
    # Request and response share the same TTB message code.
    return Msg(mc, encode(req), mc)
[ "def", "encode_timeseries_put", "(", "self", ",", "tsobj", ")", ":", "if", "tsobj", ".", "columns", ":", "raise", "NotImplementedError", "(", "'columns are not used'", ")", "if", "tsobj", ".", "rows", "and", "isinstance", "(", "tsobj", ".", "rows", ",", "list", ")", ":", "req_rows", "=", "[", "]", "for", "row", "in", "tsobj", ".", "rows", ":", "req_r", "=", "[", "]", "for", "cell", "in", "row", ":", "req_r", ".", "append", "(", "self", ".", "encode_to_ts_cell", "(", "cell", ")", ")", "req_rows", ".", "append", "(", "tuple", "(", "req_r", ")", ")", "req", "=", "tsputreq_a", ",", "tsobj", ".", "table", ".", "name", ",", "[", "]", ",", "req_rows", "mc", "=", "MSG_CODE_TS_TTB_MSG", "rc", "=", "MSG_CODE_TS_TTB_MSG", "return", "Msg", "(", "mc", ",", "encode", "(", "req", ")", ",", "rc", ")", "else", ":", "raise", "RiakError", "(", "\"TsObject requires a list of rows\"", ")" ]
Returns an Erlang-TTB encoded tuple with the appropriate data and metadata from a TsObject. :param tsobj: a TsObject :type tsobj: TsObject :rtype: term-to-binary encoded object
[ "Returns", "an", "Erlang", "-", "TTB", "encoded", "tuple", "with", "the", "appropriate", "data", "and", "metadata", "from", "a", "TsObject", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/ttb.py#L116-L140
train
basho/riak-python-client
riak/codecs/ttb.py
TtbCodec.decode_timeseries_row
def decode_timeseries_row(self, tsrow, tsct, convert_timestamp=False): """ Decodes a TTB-encoded TsRow into a list :param tsrow: the TTB decoded TsRow to decode. :type tsrow: TTB dncoded row :param tsct: the TTB decoded column types (atoms). :type tsct: list :param convert_timestamp: Convert timestamps to datetime objects :type tsobj: boolean :rtype list """ row = [] for i, cell in enumerate(tsrow): if cell is None: row.append(None) elif isinstance(cell, list) and len(cell) == 0: row.append(None) else: if convert_timestamp and tsct[i] == timestamp_a: row.append(datetime_from_unix_time_millis(cell)) else: row.append(cell) return row
python
def decode_timeseries_row(self, tsrow, tsct, convert_timestamp=False):
    """
    Decodes a TTB-encoded TsRow into a list

    :param tsrow: the TTB decoded TsRow to decode.
    :type tsrow: TTB decoded row
    :param tsct: the TTB decoded column types (atoms).
    :type tsct: list
    :param convert_timestamp: Convert timestamps to datetime objects
    :type convert_timestamp: boolean
    :rtype list
    """
    row = []
    for col, cell in enumerate(tsrow):
        # Both None and an empty list encode a null cell.
        if cell is None or (isinstance(cell, list) and len(cell) == 0):
            row.append(None)
        elif convert_timestamp and tsct[col] == timestamp_a:
            row.append(datetime_from_unix_time_millis(cell))
        else:
            row.append(cell)
    return row
[ "def", "decode_timeseries_row", "(", "self", ",", "tsrow", ",", "tsct", ",", "convert_timestamp", "=", "False", ")", ":", "row", "=", "[", "]", "for", "i", ",", "cell", "in", "enumerate", "(", "tsrow", ")", ":", "if", "cell", "is", "None", ":", "row", ".", "append", "(", "None", ")", "elif", "isinstance", "(", "cell", ",", "list", ")", "and", "len", "(", "cell", ")", "==", "0", ":", "row", ".", "append", "(", "None", ")", "else", ":", "if", "convert_timestamp", "and", "tsct", "[", "i", "]", "==", "timestamp_a", ":", "row", ".", "append", "(", "datetime_from_unix_time_millis", "(", "cell", ")", ")", "else", ":", "row", ".", "append", "(", "cell", ")", "return", "row" ]
Decodes a TTB-encoded TsRow into a list :param tsrow: the TTB decoded TsRow to decode. :type tsrow: TTB dncoded row :param tsct: the TTB decoded column types (atoms). :type tsct: list :param convert_timestamp: Convert timestamps to datetime objects :type tsobj: boolean :rtype list
[ "Decodes", "a", "TTB", "-", "encoded", "TsRow", "into", "a", "list" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/ttb.py#L205-L228
train
basho/riak-python-client
riak/datatypes/set.py
Set.to_op
def to_op(self): """ Extracts the modification operation from the set. :rtype: dict, None """ if not self._adds and not self._removes: return None changes = {} if self._adds: changes['adds'] = list(self._adds) if self._removes: changes['removes'] = list(self._removes) return changes
python
def to_op(self):
    """
    Extracts the modification operation from the set.

    :rtype: dict, None
    """
    changes = {}
    if self._adds:
        changes['adds'] = list(self._adds)
    if self._removes:
        changes['removes'] = list(self._removes)
    # An empty dict means nothing changed; callers expect None then.
    return changes or None
[ "def", "to_op", "(", "self", ")", ":", "if", "not", "self", ".", "_adds", "and", "not", "self", ".", "_removes", ":", "return", "None", "changes", "=", "{", "}", "if", "self", ".", "_adds", ":", "changes", "[", "'adds'", "]", "=", "list", "(", "self", ".", "_adds", ")", "if", "self", ".", "_removes", ":", "changes", "[", "'removes'", "]", "=", "list", "(", "self", ".", "_removes", ")", "return", "changes" ]
Extracts the modification operation from the set. :rtype: dict, None
[ "Extracts", "the", "modification", "operation", "from", "the", "set", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/set.py#L60-L73
train
basho/riak-python-client
riak/datatypes/set.py
Set.discard
def discard(self, element): """ Removes an element from the set. .. note: You may remove elements from the set that are not present, but a context from the server is required. :param element: the element to remove :type element: str """ _check_element(element) self._require_context() self._removes.add(element)
python
def discard(self, element):
    """
    Removes an element from the set.

    .. note: You may remove elements from the set that are not
       present, but a context from the server is required.

    :param element: the element to remove
    :type element: str
    """
    # Validate the element first, then ensure a server context exists
    # before recording the pending removal.
    _check_element(element)
    self._require_context()
    self._removes.add(element)
[ "def", "discard", "(", "self", ",", "element", ")", ":", "_check_element", "(", "element", ")", "self", ".", "_require_context", "(", ")", "self", ".", "_removes", ".", "add", "(", "element", ")" ]
Removes an element from the set. .. note: You may remove elements from the set that are not present, but a context from the server is required. :param element: the element to remove :type element: str
[ "Removes", "an", "element", "from", "the", "set", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/set.py#L101-L113
train
basho/riak-python-client
riak/multidict.py
MultiDict.getone
def getone(self, key): """ Get one value matching the key, raising a KeyError if multiple values were found. """ v = self.getall(key) if not v: raise KeyError('Key not found: %r' % key) if len(v) > 1: raise KeyError('Multiple values match %r: %r' % (key, v)) return v[0]
python
def getone(self, key):
    """
    Get one value matching the key, raising a KeyError if multiple
    values were found.
    """
    matches = self.getall(key)
    if not matches:
        raise KeyError('Key not found: %r' % key)
    if len(matches) > 1:
        raise KeyError('Multiple values match %r: %r' % (key, matches))
    return matches[0]
[ "def", "getone", "(", "self", ",", "key", ")", ":", "v", "=", "self", ".", "getall", "(", "key", ")", "if", "not", "v", ":", "raise", "KeyError", "(", "'Key not found: %r'", "%", "key", ")", "if", "len", "(", "v", ")", ">", "1", ":", "raise", "KeyError", "(", "'Multiple values match %r: %r'", "%", "(", "key", ",", "v", ")", ")", "return", "v", "[", "0", "]" ]
Get one value matching the key, raising a KeyError if multiple values were found.
[ "Get", "one", "value", "matching", "the", "key", "raising", "a", "KeyError", "if", "multiple", "values", "were", "found", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/multidict.py#L73-L83
train
basho/riak-python-client
riak/multidict.py
MultiDict.dict_of_lists
def dict_of_lists(self): """ Returns a dictionary where each key is associated with a list of values. """ result = {} for key, value in self._items: if key in result: result[key].append(value) else: result[key] = [value] return result
python
def dict_of_lists(self):
    """
    Returns a dictionary where each key is associated with a list of
    values.
    """
    # Group values by key, preserving insertion order within each key.
    grouped = {}
    for key, value in self._items:
        grouped.setdefault(key, []).append(value)
    return grouped
[ "def", "dict_of_lists", "(", "self", ")", ":", "result", "=", "{", "}", "for", "key", ",", "value", "in", "self", ".", "_items", ":", "if", "key", "in", "result", ":", "result", "[", "key", "]", ".", "append", "(", "value", ")", "else", ":", "result", "[", "key", "]", "=", "[", "value", "]", "return", "result" ]
Returns a dictionary where each key is associated with a list of values.
[ "Returns", "a", "dictionary", "where", "each", "key", "is", "associated", "with", "a", "list", "of", "values", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/multidict.py#L108-L119
train
basho/riak-python-client
riak/client/multi.py
MultiPool.enq
def enq(self, task): """ Enqueues a fetch task to the pool of workers. This will raise a RuntimeError if the pool is stopped or in the process of stopping. :param task: the Task object :type task: Task or PutTask """ if not self._stop.is_set(): self._inq.put(task) else: raise RuntimeError("Attempted to enqueue an operation while " "multi pool was shutdown!")
python
def enq(self, task): """ Enqueues a fetch task to the pool of workers. This will raise a RuntimeError if the pool is stopped or in the process of stopping. :param task: the Task object :type task: Task or PutTask """ if not self._stop.is_set(): self._inq.put(task) else: raise RuntimeError("Attempted to enqueue an operation while " "multi pool was shutdown!")
[ "def", "enq", "(", "self", ",", "task", ")", ":", "if", "not", "self", ".", "_stop", ".", "is_set", "(", ")", ":", "self", ".", "_inq", ".", "put", "(", "task", ")", "else", ":", "raise", "RuntimeError", "(", "\"Attempted to enqueue an operation while \"", "\"multi pool was shutdown!\"", ")" ]
Enqueues a fetch task to the pool of workers. This will raise a RuntimeError if the pool is stopped or in the process of stopping. :param task: the Task object :type task: Task or PutTask
[ "Enqueues", "a", "fetch", "task", "to", "the", "pool", "of", "workers", ".", "This", "will", "raise", "a", "RuntimeError", "if", "the", "pool", "is", "stopped", "or", "in", "the", "process", "of", "stopping", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/multi.py#L73-L86
train
basho/riak-python-client
riak/client/multi.py
MultiPool.start
def start(self): """ Starts the worker threads if they are not already started. This method is thread-safe and will be called automatically when executing an operation. """ # Check whether we are already started, skip if we are. if not self._started.is_set(): # If we are not started, try to capture the lock. if self._lock.acquire(False): # If we got the lock, go ahead and start the worker # threads, set the started flag, and release the lock. for i in range(self._size): name = "riak.client.multi-worker-{0}-{1}".format( self._name, i) worker = Thread(target=self._worker_method, name=name) worker.daemon = False worker.start() self._workers.append(worker) self._started.set() self._lock.release() else: # We didn't get the lock, so someone else is already # starting the worker threads. Wait until they have # signaled that the threads are started. self._started.wait()
python
def start(self): """ Starts the worker threads if they are not already started. This method is thread-safe and will be called automatically when executing an operation. """ # Check whether we are already started, skip if we are. if not self._started.is_set(): # If we are not started, try to capture the lock. if self._lock.acquire(False): # If we got the lock, go ahead and start the worker # threads, set the started flag, and release the lock. for i in range(self._size): name = "riak.client.multi-worker-{0}-{1}".format( self._name, i) worker = Thread(target=self._worker_method, name=name) worker.daemon = False worker.start() self._workers.append(worker) self._started.set() self._lock.release() else: # We didn't get the lock, so someone else is already # starting the worker threads. Wait until they have # signaled that the threads are started. self._started.wait()
[ "def", "start", "(", "self", ")", ":", "# Check whether we are already started, skip if we are.", "if", "not", "self", ".", "_started", ".", "is_set", "(", ")", ":", "# If we are not started, try to capture the lock.", "if", "self", ".", "_lock", ".", "acquire", "(", "False", ")", ":", "# If we got the lock, go ahead and start the worker", "# threads, set the started flag, and release the lock.", "for", "i", "in", "range", "(", "self", ".", "_size", ")", ":", "name", "=", "\"riak.client.multi-worker-{0}-{1}\"", ".", "format", "(", "self", ".", "_name", ",", "i", ")", "worker", "=", "Thread", "(", "target", "=", "self", ".", "_worker_method", ",", "name", "=", "name", ")", "worker", ".", "daemon", "=", "False", "worker", ".", "start", "(", ")", "self", ".", "_workers", ".", "append", "(", "worker", ")", "self", ".", "_started", ".", "set", "(", ")", "self", ".", "_lock", ".", "release", "(", ")", "else", ":", "# We didn't get the lock, so someone else is already", "# starting the worker threads. Wait until they have", "# signaled that the threads are started.", "self", ".", "_started", ".", "wait", "(", ")" ]
Starts the worker threads if they are not already started. This method is thread-safe and will be called automatically when executing an operation.
[ "Starts", "the", "worker", "threads", "if", "they", "are", "not", "already", "started", ".", "This", "method", "is", "thread", "-", "safe", "and", "will", "be", "called", "automatically", "when", "executing", "an", "operation", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/multi.py#L88-L113
train
basho/riak-python-client
riak/client/multi.py
MultiPool.stop
def stop(self): """ Signals the worker threads to exit and waits on them. """ if not self.stopped(): self._stop.set() for worker in self._workers: worker.join()
python
def stop(self): """ Signals the worker threads to exit and waits on them. """ if not self.stopped(): self._stop.set() for worker in self._workers: worker.join()
[ "def", "stop", "(", "self", ")", ":", "if", "not", "self", ".", "stopped", "(", ")", ":", "self", ".", "_stop", ".", "set", "(", ")", "for", "worker", "in", "self", ".", "_workers", ":", "worker", ".", "join", "(", ")" ]
Signals the worker threads to exit and waits on them.
[ "Signals", "the", "worker", "threads", "to", "exit", "and", "waits", "on", "them", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/multi.py#L115-L122
train
basho/riak-python-client
riak/datatypes/map.py
Map._check_key
def _check_key(self, key): """ Ensures well-formedness of a key. """ if not len(key) == 2: raise TypeError('invalid key: %r' % key) elif key[1] not in TYPES: raise TypeError('invalid datatype: %s' % key[1])
python
def _check_key(self, key): """ Ensures well-formedness of a key. """ if not len(key) == 2: raise TypeError('invalid key: %r' % key) elif key[1] not in TYPES: raise TypeError('invalid datatype: %s' % key[1])
[ "def", "_check_key", "(", "self", ",", "key", ")", ":", "if", "not", "len", "(", "key", ")", "==", "2", ":", "raise", "TypeError", "(", "'invalid key: %r'", "%", "key", ")", "elif", "key", "[", "1", "]", "not", "in", "TYPES", ":", "raise", "TypeError", "(", "'invalid datatype: %s'", "%", "key", "[", "1", "]", ")" ]
Ensures well-formedness of a key.
[ "Ensures", "well", "-", "formedness", "of", "a", "key", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/map.py#L227-L234
train
basho/riak-python-client
riak/datatypes/map.py
Map.modified
def modified(self): """ Whether the map has staged local modifications. """ if self._removes: return True for v in self._value: if self._value[v].modified: return True for v in self._updates: if self._updates[v].modified: return True return False
python
def modified(self): """ Whether the map has staged local modifications. """ if self._removes: return True for v in self._value: if self._value[v].modified: return True for v in self._updates: if self._updates[v].modified: return True return False
[ "def", "modified", "(", "self", ")", ":", "if", "self", ".", "_removes", ":", "return", "True", "for", "v", "in", "self", ".", "_value", ":", "if", "self", ".", "_value", "[", "v", "]", ".", "modified", ":", "return", "True", "for", "v", "in", "self", ".", "_updates", ":", "if", "self", ".", "_updates", "[", "v", "]", ".", "modified", ":", "return", "True", "return", "False" ]
Whether the map has staged local modifications.
[ "Whether", "the", "map", "has", "staged", "local", "modifications", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/map.py#L252-L264
train
basho/riak-python-client
commands.py
build_messages._format_python2_or_3
def _format_python2_or_3(self): """ Change the PB files to use full pathnames for Python 3.x and modify the metaclasses to be version agnostic """ pb_files = set() with open(self.source, 'r', buffering=1) as csvfile: reader = csv.reader(csvfile) for row in reader: _, _, proto = row pb_files.add('riak/pb/{0}_pb2.py'.format(proto)) for im in sorted(pb_files): with open(im, 'r', buffering=1) as pbfile: contents = 'from six import *\n' + pbfile.read() contents = re.sub(r'riak_pb2', r'riak.pb.riak_pb2', contents) # Look for this pattern in the protoc-generated file: # # class RpbCounterGetResp(_message.Message): # __metaclass__ = _reflection.GeneratedProtocolMessageType # # and convert it to: # # @add_metaclass(_reflection.GeneratedProtocolMessageType) # class RpbCounterGetResp(_message.Message): contents = re.sub( r'class\s+(\S+)\((\S+)\):\s*\n' '\s+__metaclass__\s+=\s+(\S+)\s*\n', r'@add_metaclass(\3)\nclass \1(\2):\n', contents) with open(im, 'w', buffering=1) as pbfile: pbfile.write(contents)
python
def _format_python2_or_3(self): """ Change the PB files to use full pathnames for Python 3.x and modify the metaclasses to be version agnostic """ pb_files = set() with open(self.source, 'r', buffering=1) as csvfile: reader = csv.reader(csvfile) for row in reader: _, _, proto = row pb_files.add('riak/pb/{0}_pb2.py'.format(proto)) for im in sorted(pb_files): with open(im, 'r', buffering=1) as pbfile: contents = 'from six import *\n' + pbfile.read() contents = re.sub(r'riak_pb2', r'riak.pb.riak_pb2', contents) # Look for this pattern in the protoc-generated file: # # class RpbCounterGetResp(_message.Message): # __metaclass__ = _reflection.GeneratedProtocolMessageType # # and convert it to: # # @add_metaclass(_reflection.GeneratedProtocolMessageType) # class RpbCounterGetResp(_message.Message): contents = re.sub( r'class\s+(\S+)\((\S+)\):\s*\n' '\s+__metaclass__\s+=\s+(\S+)\s*\n', r'@add_metaclass(\3)\nclass \1(\2):\n', contents) with open(im, 'w', buffering=1) as pbfile: pbfile.write(contents)
[ "def", "_format_python2_or_3", "(", "self", ")", ":", "pb_files", "=", "set", "(", ")", "with", "open", "(", "self", ".", "source", ",", "'r'", ",", "buffering", "=", "1", ")", "as", "csvfile", ":", "reader", "=", "csv", ".", "reader", "(", "csvfile", ")", "for", "row", "in", "reader", ":", "_", ",", "_", ",", "proto", "=", "row", "pb_files", ".", "add", "(", "'riak/pb/{0}_pb2.py'", ".", "format", "(", "proto", ")", ")", "for", "im", "in", "sorted", "(", "pb_files", ")", ":", "with", "open", "(", "im", ",", "'r'", ",", "buffering", "=", "1", ")", "as", "pbfile", ":", "contents", "=", "'from six import *\\n'", "+", "pbfile", ".", "read", "(", ")", "contents", "=", "re", ".", "sub", "(", "r'riak_pb2'", ",", "r'riak.pb.riak_pb2'", ",", "contents", ")", "# Look for this pattern in the protoc-generated file:", "#", "# class RpbCounterGetResp(_message.Message):", "# __metaclass__ = _reflection.GeneratedProtocolMessageType", "#", "# and convert it to:", "#", "# @add_metaclass(_reflection.GeneratedProtocolMessageType)", "# class RpbCounterGetResp(_message.Message):", "contents", "=", "re", ".", "sub", "(", "r'class\\s+(\\S+)\\((\\S+)\\):\\s*\\n'", "'\\s+__metaclass__\\s+=\\s+(\\S+)\\s*\\n'", ",", "r'@add_metaclass(\\3)\\nclass \\1(\\2):\\n'", ",", "contents", ")", "with", "open", "(", "im", ",", "'w'", ",", "buffering", "=", "1", ")", "as", "pbfile", ":", "pbfile", ".", "write", "(", "contents", ")" ]
Change the PB files to use full pathnames for Python 3.x and modify the metaclasses to be version agnostic
[ "Change", "the", "PB", "files", "to", "use", "full", "pathnames", "for", "Python", "3", ".", "x", "and", "modify", "the", "metaclasses", "to", "be", "version", "agnostic" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/commands.py#L372-L405
train
basho/riak-python-client
riak/datatypes/datatype.py
Datatype.reload
def reload(self, **params): """ Reloads the datatype from Riak. .. warning: This clears any local modifications you might have made. :param r: the read quorum :type r: integer, string, None :param pr: the primary read quorum :type pr: integer, string, None :param basic_quorum: whether to use the "basic quorum" policy for not-founds :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool :param timeout: a timeout value in milliseconds :type timeout: int :param include_context: whether to return the opaque context as well as the value, which is useful for removal operations on sets and maps :type include_context: bool :rtype: :class:`Datatype` """ if not self.bucket: raise ValueError('bucket property not assigned') if not self.key: raise ValueError('key property not assigned') dtype, value, context = self.bucket._client._fetch_datatype( self.bucket, self.key, **params) if not dtype == self.type_name: raise TypeError("Expected datatype {} but " "got datatype {}".format(self.__class__, TYPES[dtype])) self.clear() self._context = context self._set_value(value) return self
python
def reload(self, **params): """ Reloads the datatype from Riak. .. warning: This clears any local modifications you might have made. :param r: the read quorum :type r: integer, string, None :param pr: the primary read quorum :type pr: integer, string, None :param basic_quorum: whether to use the "basic quorum" policy for not-founds :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool :param timeout: a timeout value in milliseconds :type timeout: int :param include_context: whether to return the opaque context as well as the value, which is useful for removal operations on sets and maps :type include_context: bool :rtype: :class:`Datatype` """ if not self.bucket: raise ValueError('bucket property not assigned') if not self.key: raise ValueError('key property not assigned') dtype, value, context = self.bucket._client._fetch_datatype( self.bucket, self.key, **params) if not dtype == self.type_name: raise TypeError("Expected datatype {} but " "got datatype {}".format(self.__class__, TYPES[dtype])) self.clear() self._context = context self._set_value(value) return self
[ "def", "reload", "(", "self", ",", "*", "*", "params", ")", ":", "if", "not", "self", ".", "bucket", ":", "raise", "ValueError", "(", "'bucket property not assigned'", ")", "if", "not", "self", ".", "key", ":", "raise", "ValueError", "(", "'key property not assigned'", ")", "dtype", ",", "value", ",", "context", "=", "self", ".", "bucket", ".", "_client", ".", "_fetch_datatype", "(", "self", ".", "bucket", ",", "self", ".", "key", ",", "*", "*", "params", ")", "if", "not", "dtype", "==", "self", ".", "type_name", ":", "raise", "TypeError", "(", "\"Expected datatype {} but \"", "\"got datatype {}\"", ".", "format", "(", "self", ".", "__class__", ",", "TYPES", "[", "dtype", "]", ")", ")", "self", ".", "clear", "(", ")", "self", ".", "_context", "=", "context", "self", ".", "_set_value", "(", "value", ")", "return", "self" ]
Reloads the datatype from Riak. .. warning: This clears any local modifications you might have made. :param r: the read quorum :type r: integer, string, None :param pr: the primary read quorum :type pr: integer, string, None :param basic_quorum: whether to use the "basic quorum" policy for not-founds :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool :param timeout: a timeout value in milliseconds :type timeout: int :param include_context: whether to return the opaque context as well as the value, which is useful for removal operations on sets and maps :type include_context: bool :rtype: :class:`Datatype`
[ "Reloads", "the", "datatype", "from", "Riak", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/datatype.py#L79-L120
train
basho/riak-python-client
riak/datatypes/datatype.py
Datatype.update
def update(self, **params): """ Sends locally staged mutations to Riak. :param w: W-value, wait for this many partitions to respond before returning to client. :type w: integer :param dw: DW-value, wait for this many partitions to confirm the write before returning to client. :type dw: integer :param pw: PW-value, require this many primary partitions to be available before performing the put :type pw: integer :param return_body: if the newly stored object should be retrieved, defaults to True :type return_body: bool :param include_context: whether to return the new opaque context when `return_body` is `True` :type include_context: bool :param timeout: a timeout value in milliseconds :type timeout: int :rtype: a subclass of :class:`~riak.datatypes.Datatype` """ if not self.modified: raise ValueError("No operation to perform") params.setdefault('return_body', True) self.bucket._client.update_datatype(self, **params) self.clear() return self
python
def update(self, **params): """ Sends locally staged mutations to Riak. :param w: W-value, wait for this many partitions to respond before returning to client. :type w: integer :param dw: DW-value, wait for this many partitions to confirm the write before returning to client. :type dw: integer :param pw: PW-value, require this many primary partitions to be available before performing the put :type pw: integer :param return_body: if the newly stored object should be retrieved, defaults to True :type return_body: bool :param include_context: whether to return the new opaque context when `return_body` is `True` :type include_context: bool :param timeout: a timeout value in milliseconds :type timeout: int :rtype: a subclass of :class:`~riak.datatypes.Datatype` """ if not self.modified: raise ValueError("No operation to perform") params.setdefault('return_body', True) self.bucket._client.update_datatype(self, **params) self.clear() return self
[ "def", "update", "(", "self", ",", "*", "*", "params", ")", ":", "if", "not", "self", ".", "modified", ":", "raise", "ValueError", "(", "\"No operation to perform\"", ")", "params", ".", "setdefault", "(", "'return_body'", ",", "True", ")", "self", ".", "bucket", ".", "_client", ".", "update_datatype", "(", "self", ",", "*", "*", "params", ")", "self", ".", "clear", "(", ")", "return", "self" ]
Sends locally staged mutations to Riak. :param w: W-value, wait for this many partitions to respond before returning to client. :type w: integer :param dw: DW-value, wait for this many partitions to confirm the write before returning to client. :type dw: integer :param pw: PW-value, require this many primary partitions to be available before performing the put :type pw: integer :param return_body: if the newly stored object should be retrieved, defaults to True :type return_body: bool :param include_context: whether to return the new opaque context when `return_body` is `True` :type include_context: bool :param timeout: a timeout value in milliseconds :type timeout: int :rtype: a subclass of :class:`~riak.datatypes.Datatype`
[ "Sends", "locally", "staged", "mutations", "to", "Riak", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/datatype.py#L133-L163
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.encode_quorum
def encode_quorum(self, rw): """ Converts a symbolic quorum value into its on-the-wire equivalent. :param rw: the quorum :type rw: string, integer :rtype: integer """ if rw in QUORUM_TO_PB: return QUORUM_TO_PB[rw] elif type(rw) is int and rw >= 0: return rw else: return None
python
def encode_quorum(self, rw): """ Converts a symbolic quorum value into its on-the-wire equivalent. :param rw: the quorum :type rw: string, integer :rtype: integer """ if rw in QUORUM_TO_PB: return QUORUM_TO_PB[rw] elif type(rw) is int and rw >= 0: return rw else: return None
[ "def", "encode_quorum", "(", "self", ",", "rw", ")", ":", "if", "rw", "in", "QUORUM_TO_PB", ":", "return", "QUORUM_TO_PB", "[", "rw", "]", "elif", "type", "(", "rw", ")", "is", "int", "and", "rw", ">=", "0", ":", "return", "rw", "else", ":", "return", "None" ]
Converts a symbolic quorum value into its on-the-wire equivalent. :param rw: the quorum :type rw: string, integer :rtype: integer
[ "Converts", "a", "symbolic", "quorum", "value", "into", "its", "on", "-", "the", "-", "wire", "equivalent", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L124-L138
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.decode_contents
def decode_contents(self, contents, obj): """ Decodes the list of siblings from the protobuf representation into the object. :param contents: a list of RpbContent messages :type contents: list :param obj: a RiakObject :type obj: RiakObject :rtype RiakObject """ obj.siblings = [self.decode_content(c, RiakContent(obj)) for c in contents] # Invoke sibling-resolution logic if len(obj.siblings) > 1 and obj.resolver is not None: obj.resolver(obj) return obj
python
def decode_contents(self, contents, obj): """ Decodes the list of siblings from the protobuf representation into the object. :param contents: a list of RpbContent messages :type contents: list :param obj: a RiakObject :type obj: RiakObject :rtype RiakObject """ obj.siblings = [self.decode_content(c, RiakContent(obj)) for c in contents] # Invoke sibling-resolution logic if len(obj.siblings) > 1 and obj.resolver is not None: obj.resolver(obj) return obj
[ "def", "decode_contents", "(", "self", ",", "contents", ",", "obj", ")", ":", "obj", ".", "siblings", "=", "[", "self", ".", "decode_content", "(", "c", ",", "RiakContent", "(", "obj", ")", ")", "for", "c", "in", "contents", "]", "# Invoke sibling-resolution logic", "if", "len", "(", "obj", ".", "siblings", ")", ">", "1", "and", "obj", ".", "resolver", "is", "not", "None", ":", "obj", ".", "resolver", "(", "obj", ")", "return", "obj" ]
Decodes the list of siblings from the protobuf representation into the object. :param contents: a list of RpbContent messages :type contents: list :param obj: a RiakObject :type obj: RiakObject :rtype RiakObject
[ "Decodes", "the", "list", "of", "siblings", "from", "the", "protobuf", "representation", "into", "the", "object", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L154-L170
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.decode_content
def decode_content(self, rpb_content, sibling): """ Decodes a single sibling from the protobuf representation into a RiakObject. :param rpb_content: a single RpbContent message :type rpb_content: riak.pb.riak_pb2.RpbContent :param sibling: a RiakContent sibling container :type sibling: RiakContent :rtype: RiakContent """ if rpb_content.HasField("deleted") and rpb_content.deleted: sibling.exists = False else: sibling.exists = True if rpb_content.HasField("content_type"): sibling.content_type = bytes_to_str(rpb_content.content_type) if rpb_content.HasField("charset"): sibling.charset = bytes_to_str(rpb_content.charset) if rpb_content.HasField("content_encoding"): sibling.content_encoding = \ bytes_to_str(rpb_content.content_encoding) if rpb_content.HasField("vtag"): sibling.etag = bytes_to_str(rpb_content.vtag) sibling.links = [self.decode_link(link) for link in rpb_content.links] if rpb_content.HasField("last_mod"): sibling.last_modified = float(rpb_content.last_mod) if rpb_content.HasField("last_mod_usecs"): sibling.last_modified += rpb_content.last_mod_usecs / 1000000.0 sibling.usermeta = dict([(bytes_to_str(usermd.key), bytes_to_str(usermd.value)) for usermd in rpb_content.usermeta]) sibling.indexes = set([(bytes_to_str(index.key), decode_index_value(index.key, index.value)) for index in rpb_content.indexes]) sibling.encoded_data = rpb_content.value return sibling
python
def decode_content(self, rpb_content, sibling): """ Decodes a single sibling from the protobuf representation into a RiakObject. :param rpb_content: a single RpbContent message :type rpb_content: riak.pb.riak_pb2.RpbContent :param sibling: a RiakContent sibling container :type sibling: RiakContent :rtype: RiakContent """ if rpb_content.HasField("deleted") and rpb_content.deleted: sibling.exists = False else: sibling.exists = True if rpb_content.HasField("content_type"): sibling.content_type = bytes_to_str(rpb_content.content_type) if rpb_content.HasField("charset"): sibling.charset = bytes_to_str(rpb_content.charset) if rpb_content.HasField("content_encoding"): sibling.content_encoding = \ bytes_to_str(rpb_content.content_encoding) if rpb_content.HasField("vtag"): sibling.etag = bytes_to_str(rpb_content.vtag) sibling.links = [self.decode_link(link) for link in rpb_content.links] if rpb_content.HasField("last_mod"): sibling.last_modified = float(rpb_content.last_mod) if rpb_content.HasField("last_mod_usecs"): sibling.last_modified += rpb_content.last_mod_usecs / 1000000.0 sibling.usermeta = dict([(bytes_to_str(usermd.key), bytes_to_str(usermd.value)) for usermd in rpb_content.usermeta]) sibling.indexes = set([(bytes_to_str(index.key), decode_index_value(index.key, index.value)) for index in rpb_content.indexes]) sibling.encoded_data = rpb_content.value return sibling
[ "def", "decode_content", "(", "self", ",", "rpb_content", ",", "sibling", ")", ":", "if", "rpb_content", ".", "HasField", "(", "\"deleted\"", ")", "and", "rpb_content", ".", "deleted", ":", "sibling", ".", "exists", "=", "False", "else", ":", "sibling", ".", "exists", "=", "True", "if", "rpb_content", ".", "HasField", "(", "\"content_type\"", ")", ":", "sibling", ".", "content_type", "=", "bytes_to_str", "(", "rpb_content", ".", "content_type", ")", "if", "rpb_content", ".", "HasField", "(", "\"charset\"", ")", ":", "sibling", ".", "charset", "=", "bytes_to_str", "(", "rpb_content", ".", "charset", ")", "if", "rpb_content", ".", "HasField", "(", "\"content_encoding\"", ")", ":", "sibling", ".", "content_encoding", "=", "bytes_to_str", "(", "rpb_content", ".", "content_encoding", ")", "if", "rpb_content", ".", "HasField", "(", "\"vtag\"", ")", ":", "sibling", ".", "etag", "=", "bytes_to_str", "(", "rpb_content", ".", "vtag", ")", "sibling", ".", "links", "=", "[", "self", ".", "decode_link", "(", "link", ")", "for", "link", "in", "rpb_content", ".", "links", "]", "if", "rpb_content", ".", "HasField", "(", "\"last_mod\"", ")", ":", "sibling", ".", "last_modified", "=", "float", "(", "rpb_content", ".", "last_mod", ")", "if", "rpb_content", ".", "HasField", "(", "\"last_mod_usecs\"", ")", ":", "sibling", ".", "last_modified", "+=", "rpb_content", ".", "last_mod_usecs", "/", "1000000.0", "sibling", ".", "usermeta", "=", "dict", "(", "[", "(", "bytes_to_str", "(", "usermd", ".", "key", ")", ",", "bytes_to_str", "(", "usermd", ".", "value", ")", ")", "for", "usermd", "in", "rpb_content", ".", "usermeta", "]", ")", "sibling", ".", "indexes", "=", "set", "(", "[", "(", "bytes_to_str", "(", "index", ".", "key", ")", ",", "decode_index_value", "(", "index", ".", "key", ",", "index", ".", "value", ")", ")", "for", "index", "in", "rpb_content", ".", "indexes", "]", ")", "sibling", ".", "encoded_data", "=", "rpb_content", ".", "value", "return", "sibling" ]
Decodes a single sibling from the protobuf representation into a RiakObject. :param rpb_content: a single RpbContent message :type rpb_content: riak.pb.riak_pb2.RpbContent :param sibling: a RiakContent sibling container :type sibling: RiakContent :rtype: RiakContent
[ "Decodes", "a", "single", "sibling", "from", "the", "protobuf", "representation", "into", "a", "RiakObject", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L172-L213
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.encode_content
def encode_content(self, robj, rpb_content): """ Fills an RpbContent message with the appropriate data and metadata from a RiakObject. :param robj: a RiakObject :type robj: RiakObject :param rpb_content: the protobuf message to fill :type rpb_content: riak.pb.riak_pb2.RpbContent """ if robj.content_type: rpb_content.content_type = str_to_bytes(robj.content_type) if robj.charset: rpb_content.charset = str_to_bytes(robj.charset) if robj.content_encoding: rpb_content.content_encoding = str_to_bytes(robj.content_encoding) for uk in robj.usermeta: pair = rpb_content.usermeta.add() pair.key = str_to_bytes(uk) pair.value = str_to_bytes(robj.usermeta[uk]) for link in robj.links: pb_link = rpb_content.links.add() try: bucket, key, tag = link except ValueError: raise RiakError("Invalid link tuple %s" % link) pb_link.bucket = str_to_bytes(bucket) pb_link.key = str_to_bytes(key) if tag: pb_link.tag = str_to_bytes(tag) else: pb_link.tag = str_to_bytes('') for field, value in robj.indexes: pair = rpb_content.indexes.add() pair.key = str_to_bytes(field) pair.value = str_to_bytes(str(value)) # Python 2.x data is stored in a string if six.PY2: rpb_content.value = str(robj.encoded_data) else: rpb_content.value = robj.encoded_data
python
def encode_content(self, robj, rpb_content): """ Fills an RpbContent message with the appropriate data and metadata from a RiakObject. :param robj: a RiakObject :type robj: RiakObject :param rpb_content: the protobuf message to fill :type rpb_content: riak.pb.riak_pb2.RpbContent """ if robj.content_type: rpb_content.content_type = str_to_bytes(robj.content_type) if robj.charset: rpb_content.charset = str_to_bytes(robj.charset) if robj.content_encoding: rpb_content.content_encoding = str_to_bytes(robj.content_encoding) for uk in robj.usermeta: pair = rpb_content.usermeta.add() pair.key = str_to_bytes(uk) pair.value = str_to_bytes(robj.usermeta[uk]) for link in robj.links: pb_link = rpb_content.links.add() try: bucket, key, tag = link except ValueError: raise RiakError("Invalid link tuple %s" % link) pb_link.bucket = str_to_bytes(bucket) pb_link.key = str_to_bytes(key) if tag: pb_link.tag = str_to_bytes(tag) else: pb_link.tag = str_to_bytes('') for field, value in robj.indexes: pair = rpb_content.indexes.add() pair.key = str_to_bytes(field) pair.value = str_to_bytes(str(value)) # Python 2.x data is stored in a string if six.PY2: rpb_content.value = str(robj.encoded_data) else: rpb_content.value = robj.encoded_data
[ "def", "encode_content", "(", "self", ",", "robj", ",", "rpb_content", ")", ":", "if", "robj", ".", "content_type", ":", "rpb_content", ".", "content_type", "=", "str_to_bytes", "(", "robj", ".", "content_type", ")", "if", "robj", ".", "charset", ":", "rpb_content", ".", "charset", "=", "str_to_bytes", "(", "robj", ".", "charset", ")", "if", "robj", ".", "content_encoding", ":", "rpb_content", ".", "content_encoding", "=", "str_to_bytes", "(", "robj", ".", "content_encoding", ")", "for", "uk", "in", "robj", ".", "usermeta", ":", "pair", "=", "rpb_content", ".", "usermeta", ".", "add", "(", ")", "pair", ".", "key", "=", "str_to_bytes", "(", "uk", ")", "pair", ".", "value", "=", "str_to_bytes", "(", "robj", ".", "usermeta", "[", "uk", "]", ")", "for", "link", "in", "robj", ".", "links", ":", "pb_link", "=", "rpb_content", ".", "links", ".", "add", "(", ")", "try", ":", "bucket", ",", "key", ",", "tag", "=", "link", "except", "ValueError", ":", "raise", "RiakError", "(", "\"Invalid link tuple %s\"", "%", "link", ")", "pb_link", ".", "bucket", "=", "str_to_bytes", "(", "bucket", ")", "pb_link", ".", "key", "=", "str_to_bytes", "(", "key", ")", "if", "tag", ":", "pb_link", ".", "tag", "=", "str_to_bytes", "(", "tag", ")", "else", ":", "pb_link", ".", "tag", "=", "str_to_bytes", "(", "''", ")", "for", "field", ",", "value", "in", "robj", ".", "indexes", ":", "pair", "=", "rpb_content", ".", "indexes", ".", "add", "(", ")", "pair", ".", "key", "=", "str_to_bytes", "(", "field", ")", "pair", ".", "value", "=", "str_to_bytes", "(", "str", "(", "value", ")", ")", "# Python 2.x data is stored in a string", "if", "six", ".", "PY2", ":", "rpb_content", ".", "value", "=", "str", "(", "robj", ".", "encoded_data", ")", "else", ":", "rpb_content", ".", "value", "=", "robj", ".", "encoded_data" ]
Fills an RpbContent message with the appropriate data and metadata from a RiakObject. :param robj: a RiakObject :type robj: RiakObject :param rpb_content: the protobuf message to fill :type rpb_content: riak.pb.riak_pb2.RpbContent
[ "Fills", "an", "RpbContent", "message", "with", "the", "appropriate", "data", "and", "metadata", "from", "a", "RiakObject", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L215-L258
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.decode_link
def decode_link(self, link): """ Decodes an RpbLink message into a tuple :param link: an RpbLink message :type link: riak.pb.riak_pb2.RpbLink :rtype tuple """ if link.HasField("bucket"): bucket = bytes_to_str(link.bucket) else: bucket = None if link.HasField("key"): key = bytes_to_str(link.key) else: key = None if link.HasField("tag"): tag = bytes_to_str(link.tag) else: tag = None return (bucket, key, tag)
python
def decode_link(self, link): """ Decodes an RpbLink message into a tuple :param link: an RpbLink message :type link: riak.pb.riak_pb2.RpbLink :rtype tuple """ if link.HasField("bucket"): bucket = bytes_to_str(link.bucket) else: bucket = None if link.HasField("key"): key = bytes_to_str(link.key) else: key = None if link.HasField("tag"): tag = bytes_to_str(link.tag) else: tag = None return (bucket, key, tag)
[ "def", "decode_link", "(", "self", ",", "link", ")", ":", "if", "link", ".", "HasField", "(", "\"bucket\"", ")", ":", "bucket", "=", "bytes_to_str", "(", "link", ".", "bucket", ")", "else", ":", "bucket", "=", "None", "if", "link", ".", "HasField", "(", "\"key\"", ")", ":", "key", "=", "bytes_to_str", "(", "link", ".", "key", ")", "else", ":", "key", "=", "None", "if", "link", ".", "HasField", "(", "\"tag\"", ")", ":", "tag", "=", "bytes_to_str", "(", "link", ".", "tag", ")", "else", ":", "tag", "=", "None", "return", "(", "bucket", ",", "key", ",", "tag", ")" ]
Decodes an RpbLink message into a tuple :param link: an RpbLink message :type link: riak.pb.riak_pb2.RpbLink :rtype tuple
[ "Decodes", "an", "RpbLink", "message", "into", "a", "tuple" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L260-L282
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.encode_bucket_props
def encode_bucket_props(self, props, msg): """ Encodes a dict of bucket properties into the protobuf message. :param props: bucket properties :type props: dict :param msg: the protobuf message to fill :type msg: riak.pb.riak_pb2.RpbSetBucketReq """ for prop in NORMAL_PROPS: if prop in props and props[prop] is not None: if isinstance(props[prop], six.string_types): setattr(msg.props, prop, str_to_bytes(props[prop])) else: setattr(msg.props, prop, props[prop]) for prop in COMMIT_HOOK_PROPS: if prop in props: setattr(msg.props, 'has_' + prop, True) self.encode_hooklist(props[prop], getattr(msg.props, prop)) for prop in MODFUN_PROPS: if prop in props and props[prop] is not None: self.encode_modfun(props[prop], getattr(msg.props, prop)) for prop in QUORUM_PROPS: if prop in props and props[prop] not in (None, 'default'): value = self.encode_quorum(props[prop]) if value is not None: if isinstance(value, six.string_types): setattr(msg.props, prop, str_to_bytes(value)) else: setattr(msg.props, prop, value) if 'repl' in props: msg.props.repl = REPL_TO_PB[props['repl']] return msg
python
def encode_bucket_props(self, props, msg): """ Encodes a dict of bucket properties into the protobuf message. :param props: bucket properties :type props: dict :param msg: the protobuf message to fill :type msg: riak.pb.riak_pb2.RpbSetBucketReq """ for prop in NORMAL_PROPS: if prop in props and props[prop] is not None: if isinstance(props[prop], six.string_types): setattr(msg.props, prop, str_to_bytes(props[prop])) else: setattr(msg.props, prop, props[prop]) for prop in COMMIT_HOOK_PROPS: if prop in props: setattr(msg.props, 'has_' + prop, True) self.encode_hooklist(props[prop], getattr(msg.props, prop)) for prop in MODFUN_PROPS: if prop in props and props[prop] is not None: self.encode_modfun(props[prop], getattr(msg.props, prop)) for prop in QUORUM_PROPS: if prop in props and props[prop] not in (None, 'default'): value = self.encode_quorum(props[prop]) if value is not None: if isinstance(value, six.string_types): setattr(msg.props, prop, str_to_bytes(value)) else: setattr(msg.props, prop, value) if 'repl' in props: msg.props.repl = REPL_TO_PB[props['repl']] return msg
[ "def", "encode_bucket_props", "(", "self", ",", "props", ",", "msg", ")", ":", "for", "prop", "in", "NORMAL_PROPS", ":", "if", "prop", "in", "props", "and", "props", "[", "prop", "]", "is", "not", "None", ":", "if", "isinstance", "(", "props", "[", "prop", "]", ",", "six", ".", "string_types", ")", ":", "setattr", "(", "msg", ".", "props", ",", "prop", ",", "str_to_bytes", "(", "props", "[", "prop", "]", ")", ")", "else", ":", "setattr", "(", "msg", ".", "props", ",", "prop", ",", "props", "[", "prop", "]", ")", "for", "prop", "in", "COMMIT_HOOK_PROPS", ":", "if", "prop", "in", "props", ":", "setattr", "(", "msg", ".", "props", ",", "'has_'", "+", "prop", ",", "True", ")", "self", ".", "encode_hooklist", "(", "props", "[", "prop", "]", ",", "getattr", "(", "msg", ".", "props", ",", "prop", ")", ")", "for", "prop", "in", "MODFUN_PROPS", ":", "if", "prop", "in", "props", "and", "props", "[", "prop", "]", "is", "not", "None", ":", "self", ".", "encode_modfun", "(", "props", "[", "prop", "]", ",", "getattr", "(", "msg", ".", "props", ",", "prop", ")", ")", "for", "prop", "in", "QUORUM_PROPS", ":", "if", "prop", "in", "props", "and", "props", "[", "prop", "]", "not", "in", "(", "None", ",", "'default'", ")", ":", "value", "=", "self", ".", "encode_quorum", "(", "props", "[", "prop", "]", ")", "if", "value", "is", "not", "None", ":", "if", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "setattr", "(", "msg", ".", "props", ",", "prop", ",", "str_to_bytes", "(", "value", ")", ")", "else", ":", "setattr", "(", "msg", ".", "props", ",", "prop", ",", "value", ")", "if", "'repl'", "in", "props", ":", "msg", ".", "props", ".", "repl", "=", "REPL_TO_PB", "[", "props", "[", "'repl'", "]", "]", "return", "msg" ]
Encodes a dict of bucket properties into the protobuf message. :param props: bucket properties :type props: dict :param msg: the protobuf message to fill :type msg: riak.pb.riak_pb2.RpbSetBucketReq
[ "Encodes", "a", "dict", "of", "bucket", "properties", "into", "the", "protobuf", "message", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L298-L331
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.decode_bucket_props
def decode_bucket_props(self, msg): """ Decodes the protobuf bucket properties message into a dict. :param msg: the protobuf message to decode :type msg: riak.pb.riak_pb2.RpbBucketProps :rtype dict """ props = {} for prop in NORMAL_PROPS: if msg.HasField(prop): props[prop] = getattr(msg, prop) if isinstance(props[prop], bytes): props[prop] = bytes_to_str(props[prop]) for prop in COMMIT_HOOK_PROPS: if getattr(msg, 'has_' + prop): props[prop] = self.decode_hooklist(getattr(msg, prop)) for prop in MODFUN_PROPS: if msg.HasField(prop): props[prop] = self.decode_modfun(getattr(msg, prop)) for prop in QUORUM_PROPS: if msg.HasField(prop): props[prop] = self.decode_quorum(getattr(msg, prop)) if msg.HasField('repl'): props['repl'] = REPL_TO_PY[msg.repl] return props
python
def decode_bucket_props(self, msg): """ Decodes the protobuf bucket properties message into a dict. :param msg: the protobuf message to decode :type msg: riak.pb.riak_pb2.RpbBucketProps :rtype dict """ props = {} for prop in NORMAL_PROPS: if msg.HasField(prop): props[prop] = getattr(msg, prop) if isinstance(props[prop], bytes): props[prop] = bytes_to_str(props[prop]) for prop in COMMIT_HOOK_PROPS: if getattr(msg, 'has_' + prop): props[prop] = self.decode_hooklist(getattr(msg, prop)) for prop in MODFUN_PROPS: if msg.HasField(prop): props[prop] = self.decode_modfun(getattr(msg, prop)) for prop in QUORUM_PROPS: if msg.HasField(prop): props[prop] = self.decode_quorum(getattr(msg, prop)) if msg.HasField('repl'): props['repl'] = REPL_TO_PY[msg.repl] return props
[ "def", "decode_bucket_props", "(", "self", ",", "msg", ")", ":", "props", "=", "{", "}", "for", "prop", "in", "NORMAL_PROPS", ":", "if", "msg", ".", "HasField", "(", "prop", ")", ":", "props", "[", "prop", "]", "=", "getattr", "(", "msg", ",", "prop", ")", "if", "isinstance", "(", "props", "[", "prop", "]", ",", "bytes", ")", ":", "props", "[", "prop", "]", "=", "bytes_to_str", "(", "props", "[", "prop", "]", ")", "for", "prop", "in", "COMMIT_HOOK_PROPS", ":", "if", "getattr", "(", "msg", ",", "'has_'", "+", "prop", ")", ":", "props", "[", "prop", "]", "=", "self", ".", "decode_hooklist", "(", "getattr", "(", "msg", ",", "prop", ")", ")", "for", "prop", "in", "MODFUN_PROPS", ":", "if", "msg", ".", "HasField", "(", "prop", ")", ":", "props", "[", "prop", "]", "=", "self", ".", "decode_modfun", "(", "getattr", "(", "msg", ",", "prop", ")", ")", "for", "prop", "in", "QUORUM_PROPS", ":", "if", "msg", ".", "HasField", "(", "prop", ")", ":", "props", "[", "prop", "]", "=", "self", ".", "decode_quorum", "(", "getattr", "(", "msg", ",", "prop", ")", ")", "if", "msg", ".", "HasField", "(", "'repl'", ")", ":", "props", "[", "'repl'", "]", "=", "REPL_TO_PY", "[", "msg", ".", "repl", "]", "return", "props" ]
Decodes the protobuf bucket properties message into a dict. :param msg: the protobuf message to decode :type msg: riak.pb.riak_pb2.RpbBucketProps :rtype dict
[ "Decodes", "the", "protobuf", "bucket", "properties", "message", "into", "a", "dict", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L333-L358
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.encode_modfun
def encode_modfun(self, props, msg=None): """ Encodes a dict with 'mod' and 'fun' keys into a protobuf modfun pair. Used in bucket properties. :param props: the module/function pair :type props: dict :param msg: the protobuf message to fill :type msg: riak.pb.riak_pb2.RpbModFun :rtype riak.pb.riak_pb2.RpbModFun """ if msg is None: msg = riak.pb.riak_pb2.RpbModFun() msg.module = str_to_bytes(props['mod']) msg.function = str_to_bytes(props['fun']) return msg
python
def encode_modfun(self, props, msg=None): """ Encodes a dict with 'mod' and 'fun' keys into a protobuf modfun pair. Used in bucket properties. :param props: the module/function pair :type props: dict :param msg: the protobuf message to fill :type msg: riak.pb.riak_pb2.RpbModFun :rtype riak.pb.riak_pb2.RpbModFun """ if msg is None: msg = riak.pb.riak_pb2.RpbModFun() msg.module = str_to_bytes(props['mod']) msg.function = str_to_bytes(props['fun']) return msg
[ "def", "encode_modfun", "(", "self", ",", "props", ",", "msg", "=", "None", ")", ":", "if", "msg", "is", "None", ":", "msg", "=", "riak", ".", "pb", ".", "riak_pb2", ".", "RpbModFun", "(", ")", "msg", ".", "module", "=", "str_to_bytes", "(", "props", "[", "'mod'", "]", ")", "msg", ".", "function", "=", "str_to_bytes", "(", "props", "[", "'fun'", "]", ")", "return", "msg" ]
Encodes a dict with 'mod' and 'fun' keys into a protobuf modfun pair. Used in bucket properties. :param props: the module/function pair :type props: dict :param msg: the protobuf message to fill :type msg: riak.pb.riak_pb2.RpbModFun :rtype riak.pb.riak_pb2.RpbModFun
[ "Encodes", "a", "dict", "with", "mod", "and", "fun", "keys", "into", "a", "protobuf", "modfun", "pair", ".", "Used", "in", "bucket", "properties", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L372-L387
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.encode_hooklist
def encode_hooklist(self, hooklist, msg): """ Encodes a list of commit hooks into their protobuf equivalent. Used in bucket properties. :param hooklist: a list of commit hooks :type hooklist: list :param msg: a protobuf field that is a list of commit hooks """ for hook in hooklist: pbhook = msg.add() self.encode_hook(hook, pbhook)
python
def encode_hooklist(self, hooklist, msg): """ Encodes a list of commit hooks into their protobuf equivalent. Used in bucket properties. :param hooklist: a list of commit hooks :type hooklist: list :param msg: a protobuf field that is a list of commit hooks """ for hook in hooklist: pbhook = msg.add() self.encode_hook(hook, pbhook)
[ "def", "encode_hooklist", "(", "self", ",", "hooklist", ",", "msg", ")", ":", "for", "hook", "in", "hooklist", ":", "pbhook", "=", "msg", ".", "add", "(", ")", "self", ".", "encode_hook", "(", "hook", ",", "pbhook", ")" ]
Encodes a list of commit hooks into their protobuf equivalent. Used in bucket properties. :param hooklist: a list of commit hooks :type hooklist: list :param msg: a protobuf field that is a list of commit hooks
[ "Encodes", "a", "list", "of", "commit", "hooks", "into", "their", "protobuf", "equivalent", ".", "Used", "in", "bucket", "properties", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L400-L411
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.decode_hook
def decode_hook(self, hook): """ Decodes a protobuf commit hook message into a dict. Used in bucket properties. :param hook: the hook to decode :type hook: riak.pb.riak_pb2.RpbCommitHook :rtype dict """ if hook.HasField('modfun'): return self.decode_modfun(hook.modfun) else: return {'name': bytes_to_str(hook.name)}
python
def decode_hook(self, hook): """ Decodes a protobuf commit hook message into a dict. Used in bucket properties. :param hook: the hook to decode :type hook: riak.pb.riak_pb2.RpbCommitHook :rtype dict """ if hook.HasField('modfun'): return self.decode_modfun(hook.modfun) else: return {'name': bytes_to_str(hook.name)}
[ "def", "decode_hook", "(", "self", ",", "hook", ")", ":", "if", "hook", ".", "HasField", "(", "'modfun'", ")", ":", "return", "self", ".", "decode_modfun", "(", "hook", ".", "modfun", ")", "else", ":", "return", "{", "'name'", ":", "bytes_to_str", "(", "hook", ".", "name", ")", "}" ]
Decodes a protobuf commit hook message into a dict. Used in bucket properties. :param hook: the hook to decode :type hook: riak.pb.riak_pb2.RpbCommitHook :rtype dict
[ "Decodes", "a", "protobuf", "commit", "hook", "message", "into", "a", "dict", ".", "Used", "in", "bucket", "properties", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L413-L425
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.encode_hook
def encode_hook(self, hook, msg): """ Encodes a commit hook dict into the protobuf message. Used in bucket properties. :param hook: the hook to encode :type hook: dict :param msg: the protobuf message to fill :type msg: riak.pb.riak_pb2.RpbCommitHook :rtype riak.pb.riak_pb2.RpbCommitHook """ if 'name' in hook: msg.name = str_to_bytes(hook['name']) else: self.encode_modfun(hook, msg.modfun) return msg
python
def encode_hook(self, hook, msg): """ Encodes a commit hook dict into the protobuf message. Used in bucket properties. :param hook: the hook to encode :type hook: dict :param msg: the protobuf message to fill :type msg: riak.pb.riak_pb2.RpbCommitHook :rtype riak.pb.riak_pb2.RpbCommitHook """ if 'name' in hook: msg.name = str_to_bytes(hook['name']) else: self.encode_modfun(hook, msg.modfun) return msg
[ "def", "encode_hook", "(", "self", ",", "hook", ",", "msg", ")", ":", "if", "'name'", "in", "hook", ":", "msg", ".", "name", "=", "str_to_bytes", "(", "hook", "[", "'name'", "]", ")", "else", ":", "self", ".", "encode_modfun", "(", "hook", ",", "msg", ".", "modfun", ")", "return", "msg" ]
Encodes a commit hook dict into the protobuf message. Used in bucket properties. :param hook: the hook to encode :type hook: dict :param msg: the protobuf message to fill :type msg: riak.pb.riak_pb2.RpbCommitHook :rtype riak.pb.riak_pb2.RpbCommitHook
[ "Encodes", "a", "commit", "hook", "dict", "into", "the", "protobuf", "message", ".", "Used", "in", "bucket", "properties", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L427-L442
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.encode_index_req
def encode_index_req(self, bucket, index, startkey, endkey=None, return_terms=None, max_results=None, continuation=None, timeout=None, term_regex=None, streaming=False): """ Encodes a secondary index request into the protobuf message. :param bucket: the bucket whose index to query :type bucket: string :param index: the index to query :type index: string :param startkey: the value or beginning of the range :type startkey: integer, string :param endkey: the end of the range :type endkey: integer, string :param return_terms: whether to return the index term with the key :type return_terms: bool :param max_results: the maximum number of results to return (page size) :type max_results: integer :param continuation: the opaque continuation returned from a previous paginated request :type continuation: string :param timeout: a timeout value in milliseconds, or 'infinity' :type timeout: int :param term_regex: a regular expression used to filter index terms :type term_regex: string :param streaming: encode as streaming request :type streaming: bool :rtype riak.pb.riak_kv_pb2.RpbIndexReq """ req = riak.pb.riak_kv_pb2.RpbIndexReq( bucket=str_to_bytes(bucket.name), index=str_to_bytes(index)) self._add_bucket_type(req, bucket.bucket_type) if endkey is not None: req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.range req.range_min = str_to_bytes(str(startkey)) req.range_max = str_to_bytes(str(endkey)) else: req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.eq req.key = str_to_bytes(str(startkey)) if return_terms is not None: req.return_terms = return_terms if max_results: req.max_results = max_results if continuation: req.continuation = str_to_bytes(continuation) if timeout: if timeout == 'infinity': req.timeout = 0 else: req.timeout = timeout if term_regex: req.term_regex = str_to_bytes(term_regex) req.stream = streaming mc = riak.pb.messages.MSG_CODE_INDEX_REQ rc = riak.pb.messages.MSG_CODE_INDEX_RESP return Msg(mc, req.SerializeToString(), rc)
python
def encode_index_req(self, bucket, index, startkey, endkey=None, return_terms=None, max_results=None, continuation=None, timeout=None, term_regex=None, streaming=False): """ Encodes a secondary index request into the protobuf message. :param bucket: the bucket whose index to query :type bucket: string :param index: the index to query :type index: string :param startkey: the value or beginning of the range :type startkey: integer, string :param endkey: the end of the range :type endkey: integer, string :param return_terms: whether to return the index term with the key :type return_terms: bool :param max_results: the maximum number of results to return (page size) :type max_results: integer :param continuation: the opaque continuation returned from a previous paginated request :type continuation: string :param timeout: a timeout value in milliseconds, or 'infinity' :type timeout: int :param term_regex: a regular expression used to filter index terms :type term_regex: string :param streaming: encode as streaming request :type streaming: bool :rtype riak.pb.riak_kv_pb2.RpbIndexReq """ req = riak.pb.riak_kv_pb2.RpbIndexReq( bucket=str_to_bytes(bucket.name), index=str_to_bytes(index)) self._add_bucket_type(req, bucket.bucket_type) if endkey is not None: req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.range req.range_min = str_to_bytes(str(startkey)) req.range_max = str_to_bytes(str(endkey)) else: req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.eq req.key = str_to_bytes(str(startkey)) if return_terms is not None: req.return_terms = return_terms if max_results: req.max_results = max_results if continuation: req.continuation = str_to_bytes(continuation) if timeout: if timeout == 'infinity': req.timeout = 0 else: req.timeout = timeout if term_regex: req.term_regex = str_to_bytes(term_regex) req.stream = streaming mc = riak.pb.messages.MSG_CODE_INDEX_REQ rc = riak.pb.messages.MSG_CODE_INDEX_RESP return Msg(mc, req.SerializeToString(), rc)
[ "def", "encode_index_req", "(", "self", ",", "bucket", ",", "index", ",", "startkey", ",", "endkey", "=", "None", ",", "return_terms", "=", "None", ",", "max_results", "=", "None", ",", "continuation", "=", "None", ",", "timeout", "=", "None", ",", "term_regex", "=", "None", ",", "streaming", "=", "False", ")", ":", "req", "=", "riak", ".", "pb", ".", "riak_kv_pb2", ".", "RpbIndexReq", "(", "bucket", "=", "str_to_bytes", "(", "bucket", ".", "name", ")", ",", "index", "=", "str_to_bytes", "(", "index", ")", ")", "self", ".", "_add_bucket_type", "(", "req", ",", "bucket", ".", "bucket_type", ")", "if", "endkey", "is", "not", "None", ":", "req", ".", "qtype", "=", "riak", ".", "pb", ".", "riak_kv_pb2", ".", "RpbIndexReq", ".", "range", "req", ".", "range_min", "=", "str_to_bytes", "(", "str", "(", "startkey", ")", ")", "req", ".", "range_max", "=", "str_to_bytes", "(", "str", "(", "endkey", ")", ")", "else", ":", "req", ".", "qtype", "=", "riak", ".", "pb", ".", "riak_kv_pb2", ".", "RpbIndexReq", ".", "eq", "req", ".", "key", "=", "str_to_bytes", "(", "str", "(", "startkey", ")", ")", "if", "return_terms", "is", "not", "None", ":", "req", ".", "return_terms", "=", "return_terms", "if", "max_results", ":", "req", ".", "max_results", "=", "max_results", "if", "continuation", ":", "req", ".", "continuation", "=", "str_to_bytes", "(", "continuation", ")", "if", "timeout", ":", "if", "timeout", "==", "'infinity'", ":", "req", ".", "timeout", "=", "0", "else", ":", "req", ".", "timeout", "=", "timeout", "if", "term_regex", ":", "req", ".", "term_regex", "=", "str_to_bytes", "(", "term_regex", ")", "req", ".", "stream", "=", "streaming", "mc", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_INDEX_REQ", "rc", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_INDEX_RESP", "return", "Msg", "(", "mc", ",", "req", ".", "SerializeToString", "(", ")", ",", "rc", ")" ]
Encodes a secondary index request into the protobuf message. :param bucket: the bucket whose index to query :type bucket: string :param index: the index to query :type index: string :param startkey: the value or beginning of the range :type startkey: integer, string :param endkey: the end of the range :type endkey: integer, string :param return_terms: whether to return the index term with the key :type return_terms: bool :param max_results: the maximum number of results to return (page size) :type max_results: integer :param continuation: the opaque continuation returned from a previous paginated request :type continuation: string :param timeout: a timeout value in milliseconds, or 'infinity' :type timeout: int :param term_regex: a regular expression used to filter index terms :type term_regex: string :param streaming: encode as streaming request :type streaming: bool :rtype riak.pb.riak_kv_pb2.RpbIndexReq
[ "Encodes", "a", "secondary", "index", "request", "into", "the", "protobuf", "message", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L444-L501
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.decode_search_index
def decode_search_index(self, index): """ Fills an RpbYokozunaIndex message with the appropriate data. :param index: a yz index message :type index: riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex :rtype dict """ result = {} result['name'] = bytes_to_str(index.name) if index.HasField('schema'): result['schema'] = bytes_to_str(index.schema) if index.HasField('n_val'): result['n_val'] = index.n_val return result
python
def decode_search_index(self, index): """ Fills an RpbYokozunaIndex message with the appropriate data. :param index: a yz index message :type index: riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex :rtype dict """ result = {} result['name'] = bytes_to_str(index.name) if index.HasField('schema'): result['schema'] = bytes_to_str(index.schema) if index.HasField('n_val'): result['n_val'] = index.n_val return result
[ "def", "decode_search_index", "(", "self", ",", "index", ")", ":", "result", "=", "{", "}", "result", "[", "'name'", "]", "=", "bytes_to_str", "(", "index", ".", "name", ")", "if", "index", ".", "HasField", "(", "'schema'", ")", ":", "result", "[", "'schema'", "]", "=", "bytes_to_str", "(", "index", ".", "schema", ")", "if", "index", ".", "HasField", "(", "'n_val'", ")", ":", "result", "[", "'n_val'", "]", "=", "index", ".", "n_val", "return", "result" ]
Fills an RpbYokozunaIndex message with the appropriate data. :param index: a yz index message :type index: riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex :rtype dict
[ "Fills", "an", "RpbYokozunaIndex", "message", "with", "the", "appropriate", "data", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L519-L533
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.encode_timeseries_put
def encode_timeseries_put(self, tsobj): """ Fills an TsPutReq message with the appropriate data and metadata from a TsObject. :param tsobj: a TsObject :type tsobj: TsObject :param req: the protobuf message to fill :type req: riak.pb.riak_ts_pb2.TsPutReq """ req = riak.pb.riak_ts_pb2.TsPutReq() req.table = str_to_bytes(tsobj.table.name) if tsobj.columns: raise NotImplementedError("columns are not implemented yet") if tsobj.rows and isinstance(tsobj.rows, list): for row in tsobj.rows: tsr = req.rows.add() # NB: type TsRow if not isinstance(row, list): raise ValueError("TsObject row must be a list of values") for cell in row: tsc = tsr.cells.add() # NB: type TsCell self.encode_to_ts_cell(cell, tsc) else: raise RiakError("TsObject requires a list of rows") mc = riak.pb.messages.MSG_CODE_TS_PUT_REQ rc = riak.pb.messages.MSG_CODE_TS_PUT_RESP return Msg(mc, req.SerializeToString(), rc)
python
def encode_timeseries_put(self, tsobj): """ Fills an TsPutReq message with the appropriate data and metadata from a TsObject. :param tsobj: a TsObject :type tsobj: TsObject :param req: the protobuf message to fill :type req: riak.pb.riak_ts_pb2.TsPutReq """ req = riak.pb.riak_ts_pb2.TsPutReq() req.table = str_to_bytes(tsobj.table.name) if tsobj.columns: raise NotImplementedError("columns are not implemented yet") if tsobj.rows and isinstance(tsobj.rows, list): for row in tsobj.rows: tsr = req.rows.add() # NB: type TsRow if not isinstance(row, list): raise ValueError("TsObject row must be a list of values") for cell in row: tsc = tsr.cells.add() # NB: type TsCell self.encode_to_ts_cell(cell, tsc) else: raise RiakError("TsObject requires a list of rows") mc = riak.pb.messages.MSG_CODE_TS_PUT_REQ rc = riak.pb.messages.MSG_CODE_TS_PUT_RESP return Msg(mc, req.SerializeToString(), rc)
[ "def", "encode_timeseries_put", "(", "self", ",", "tsobj", ")", ":", "req", "=", "riak", ".", "pb", ".", "riak_ts_pb2", ".", "TsPutReq", "(", ")", "req", ".", "table", "=", "str_to_bytes", "(", "tsobj", ".", "table", ".", "name", ")", "if", "tsobj", ".", "columns", ":", "raise", "NotImplementedError", "(", "\"columns are not implemented yet\"", ")", "if", "tsobj", ".", "rows", "and", "isinstance", "(", "tsobj", ".", "rows", ",", "list", ")", ":", "for", "row", "in", "tsobj", ".", "rows", ":", "tsr", "=", "req", ".", "rows", ".", "add", "(", ")", "# NB: type TsRow", "if", "not", "isinstance", "(", "row", ",", "list", ")", ":", "raise", "ValueError", "(", "\"TsObject row must be a list of values\"", ")", "for", "cell", "in", "row", ":", "tsc", "=", "tsr", ".", "cells", ".", "add", "(", ")", "# NB: type TsCell", "self", ".", "encode_to_ts_cell", "(", "cell", ",", "tsc", ")", "else", ":", "raise", "RiakError", "(", "\"TsObject requires a list of rows\"", ")", "mc", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_TS_PUT_REQ", "rc", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_TS_PUT_RESP", "return", "Msg", "(", "mc", ",", "req", ".", "SerializeToString", "(", ")", ",", "rc", ")" ]
Fills an TsPutReq message with the appropriate data and metadata from a TsObject. :param tsobj: a TsObject :type tsobj: TsObject :param req: the protobuf message to fill :type req: riak.pb.riak_ts_pb2.TsPutReq
[ "Fills", "an", "TsPutReq", "message", "with", "the", "appropriate", "data", "and", "metadata", "from", "a", "TsObject", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L758-L787
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.decode_timeseries_row
def decode_timeseries_row(self, tsrow, tscols=None, convert_timestamp=False): """ Decodes a TsRow into a list :param tsrow: the protobuf TsRow to decode. :type tsrow: riak.pb.riak_ts_pb2.TsRow :param tscols: the protobuf TsColumn data to help decode. :type tscols: list :rtype list """ row = [] for i, cell in enumerate(tsrow.cells): col = None if tscols is not None: col = tscols[i] if cell.HasField('varchar_value'): if col and not (col.type == TsColumnType.Value('VARCHAR') or col.type == TsColumnType.Value('BLOB')): raise TypeError('expected VARCHAR or BLOB column') else: row.append(cell.varchar_value) elif cell.HasField('sint64_value'): if col and col.type != TsColumnType.Value('SINT64'): raise TypeError('expected SINT64 column') else: row.append(cell.sint64_value) elif cell.HasField('double_value'): if col and col.type != TsColumnType.Value('DOUBLE'): raise TypeError('expected DOUBLE column') else: row.append(cell.double_value) elif cell.HasField('timestamp_value'): if col and col.type != TsColumnType.Value('TIMESTAMP'): raise TypeError('expected TIMESTAMP column') else: dt = cell.timestamp_value if convert_timestamp: dt = datetime_from_unix_time_millis( cell.timestamp_value) row.append(dt) elif cell.HasField('boolean_value'): if col and col.type != TsColumnType.Value('BOOLEAN'): raise TypeError('expected BOOLEAN column') else: row.append(cell.boolean_value) else: row.append(None) return row
python
def decode_timeseries_row(self, tsrow, tscols=None, convert_timestamp=False): """ Decodes a TsRow into a list :param tsrow: the protobuf TsRow to decode. :type tsrow: riak.pb.riak_ts_pb2.TsRow :param tscols: the protobuf TsColumn data to help decode. :type tscols: list :rtype list """ row = [] for i, cell in enumerate(tsrow.cells): col = None if tscols is not None: col = tscols[i] if cell.HasField('varchar_value'): if col and not (col.type == TsColumnType.Value('VARCHAR') or col.type == TsColumnType.Value('BLOB')): raise TypeError('expected VARCHAR or BLOB column') else: row.append(cell.varchar_value) elif cell.HasField('sint64_value'): if col and col.type != TsColumnType.Value('SINT64'): raise TypeError('expected SINT64 column') else: row.append(cell.sint64_value) elif cell.HasField('double_value'): if col and col.type != TsColumnType.Value('DOUBLE'): raise TypeError('expected DOUBLE column') else: row.append(cell.double_value) elif cell.HasField('timestamp_value'): if col and col.type != TsColumnType.Value('TIMESTAMP'): raise TypeError('expected TIMESTAMP column') else: dt = cell.timestamp_value if convert_timestamp: dt = datetime_from_unix_time_millis( cell.timestamp_value) row.append(dt) elif cell.HasField('boolean_value'): if col and col.type != TsColumnType.Value('BOOLEAN'): raise TypeError('expected BOOLEAN column') else: row.append(cell.boolean_value) else: row.append(None) return row
[ "def", "decode_timeseries_row", "(", "self", ",", "tsrow", ",", "tscols", "=", "None", ",", "convert_timestamp", "=", "False", ")", ":", "row", "=", "[", "]", "for", "i", ",", "cell", "in", "enumerate", "(", "tsrow", ".", "cells", ")", ":", "col", "=", "None", "if", "tscols", "is", "not", "None", ":", "col", "=", "tscols", "[", "i", "]", "if", "cell", ".", "HasField", "(", "'varchar_value'", ")", ":", "if", "col", "and", "not", "(", "col", ".", "type", "==", "TsColumnType", ".", "Value", "(", "'VARCHAR'", ")", "or", "col", ".", "type", "==", "TsColumnType", ".", "Value", "(", "'BLOB'", ")", ")", ":", "raise", "TypeError", "(", "'expected VARCHAR or BLOB column'", ")", "else", ":", "row", ".", "append", "(", "cell", ".", "varchar_value", ")", "elif", "cell", ".", "HasField", "(", "'sint64_value'", ")", ":", "if", "col", "and", "col", ".", "type", "!=", "TsColumnType", ".", "Value", "(", "'SINT64'", ")", ":", "raise", "TypeError", "(", "'expected SINT64 column'", ")", "else", ":", "row", ".", "append", "(", "cell", ".", "sint64_value", ")", "elif", "cell", ".", "HasField", "(", "'double_value'", ")", ":", "if", "col", "and", "col", ".", "type", "!=", "TsColumnType", ".", "Value", "(", "'DOUBLE'", ")", ":", "raise", "TypeError", "(", "'expected DOUBLE column'", ")", "else", ":", "row", ".", "append", "(", "cell", ".", "double_value", ")", "elif", "cell", ".", "HasField", "(", "'timestamp_value'", ")", ":", "if", "col", "and", "col", ".", "type", "!=", "TsColumnType", ".", "Value", "(", "'TIMESTAMP'", ")", ":", "raise", "TypeError", "(", "'expected TIMESTAMP column'", ")", "else", ":", "dt", "=", "cell", ".", "timestamp_value", "if", "convert_timestamp", ":", "dt", "=", "datetime_from_unix_time_millis", "(", "cell", ".", "timestamp_value", ")", "row", ".", "append", "(", "dt", ")", "elif", "cell", ".", "HasField", "(", "'boolean_value'", ")", ":", "if", "col", "and", "col", ".", "type", "!=", "TsColumnType", ".", "Value", "(", "'BOOLEAN'", ")", ":", "raise", 
"TypeError", "(", "'expected BOOLEAN column'", ")", "else", ":", "row", ".", "append", "(", "cell", ".", "boolean_value", ")", "else", ":", "row", ".", "append", "(", "None", ")", "return", "row" ]
Decodes a TsRow into a list :param tsrow: the protobuf TsRow to decode. :type tsrow: riak.pb.riak_ts_pb2.TsRow :param tscols: the protobuf TsColumn data to help decode. :type tscols: list :rtype list
[ "Decodes", "a", "TsRow", "into", "a", "list" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L847-L895
train
basho/riak-python-client
riak/codecs/pbuf.py
PbufCodec.decode_preflist
def decode_preflist(self, item): """ Decodes a preflist response :param preflist: a bucket/key preflist :type preflist: list of riak.pb.riak_kv_pb2.RpbBucketKeyPreflistItem :rtype dict """ result = {'partition': item.partition, 'node': bytes_to_str(item.node), 'primary': item. primary} return result
python
def decode_preflist(self, item): """ Decodes a preflist response :param preflist: a bucket/key preflist :type preflist: list of riak.pb.riak_kv_pb2.RpbBucketKeyPreflistItem :rtype dict """ result = {'partition': item.partition, 'node': bytes_to_str(item.node), 'primary': item. primary} return result
[ "def", "decode_preflist", "(", "self", ",", "item", ")", ":", "result", "=", "{", "'partition'", ":", "item", ".", "partition", ",", "'node'", ":", "bytes_to_str", "(", "item", ".", "node", ")", ",", "'primary'", ":", "item", ".", "primary", "}", "return", "result" ]
Decodes a preflist response :param preflist: a bucket/key preflist :type preflist: list of riak.pb.riak_kv_pb2.RpbBucketKeyPreflistItem :rtype dict
[ "Decodes", "a", "preflist", "response" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L897-L909
train
basho/riak-python-client
riak/transports/tcp/transport.py
TcpTransport.ping
def ping(self): """ Ping the remote server """ msg_code = riak.pb.messages.MSG_CODE_PING_REQ codec = self._get_codec(msg_code) msg = codec.encode_ping() resp_code, _ = self._request(msg, codec) if resp_code == riak.pb.messages.MSG_CODE_PING_RESP: return True else: return False
python
def ping(self): """ Ping the remote server """ msg_code = riak.pb.messages.MSG_CODE_PING_REQ codec = self._get_codec(msg_code) msg = codec.encode_ping() resp_code, _ = self._request(msg, codec) if resp_code == riak.pb.messages.MSG_CODE_PING_RESP: return True else: return False
[ "def", "ping", "(", "self", ")", ":", "msg_code", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_PING_REQ", "codec", "=", "self", ".", "_get_codec", "(", "msg_code", ")", "msg", "=", "codec", ".", "encode_ping", "(", ")", "resp_code", ",", "_", "=", "self", ".", "_request", "(", "msg", ",", "codec", ")", "if", "resp_code", "==", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_PING_RESP", ":", "return", "True", "else", ":", "return", "False" ]
Ping the remote server
[ "Ping", "the", "remote", "server" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L107-L118
train
basho/riak-python-client
riak/transports/tcp/transport.py
TcpTransport.get_server_info
def get_server_info(self): """ Get information about the server """ # NB: can't do it this way due to recursion # codec = self._get_codec(ttb_supported=False) codec = PbufCodec() msg = Msg(riak.pb.messages.MSG_CODE_GET_SERVER_INFO_REQ, None, riak.pb.messages.MSG_CODE_GET_SERVER_INFO_RESP) resp_code, resp = self._request(msg, codec) return codec.decode_get_server_info(resp)
python
def get_server_info(self): """ Get information about the server """ # NB: can't do it this way due to recursion # codec = self._get_codec(ttb_supported=False) codec = PbufCodec() msg = Msg(riak.pb.messages.MSG_CODE_GET_SERVER_INFO_REQ, None, riak.pb.messages.MSG_CODE_GET_SERVER_INFO_RESP) resp_code, resp = self._request(msg, codec) return codec.decode_get_server_info(resp)
[ "def", "get_server_info", "(", "self", ")", ":", "# NB: can't do it this way due to recursion", "# codec = self._get_codec(ttb_supported=False)", "codec", "=", "PbufCodec", "(", ")", "msg", "=", "Msg", "(", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_GET_SERVER_INFO_REQ", ",", "None", ",", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_GET_SERVER_INFO_RESP", ")", "resp_code", ",", "resp", "=", "self", ".", "_request", "(", "msg", ",", "codec", ")", "return", "codec", ".", "decode_get_server_info", "(", "resp", ")" ]
Get information about the server
[ "Get", "information", "about", "the", "server" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L120-L130
train
basho/riak-python-client
riak/transports/tcp/transport.py
TcpTransport.get
def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None, head_only=False): """ Serialize get request and deserialize response """ msg_code = riak.pb.messages.MSG_CODE_GET_REQ codec = self._get_codec(msg_code) msg = codec.encode_get(robj, r, pr, timeout, basic_quorum, notfound_ok, head_only) resp_code, resp = self._request(msg, codec) return codec.decode_get(robj, resp)
python
def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None, head_only=False): """ Serialize get request and deserialize response """ msg_code = riak.pb.messages.MSG_CODE_GET_REQ codec = self._get_codec(msg_code) msg = codec.encode_get(robj, r, pr, timeout, basic_quorum, notfound_ok, head_only) resp_code, resp = self._request(msg, codec) return codec.decode_get(robj, resp)
[ "def", "get", "(", "self", ",", "robj", ",", "r", "=", "None", ",", "pr", "=", "None", ",", "timeout", "=", "None", ",", "basic_quorum", "=", "None", ",", "notfound_ok", "=", "None", ",", "head_only", "=", "False", ")", ":", "msg_code", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_GET_REQ", "codec", "=", "self", ".", "_get_codec", "(", "msg_code", ")", "msg", "=", "codec", ".", "encode_get", "(", "robj", ",", "r", ",", "pr", ",", "timeout", ",", "basic_quorum", ",", "notfound_ok", ",", "head_only", ")", "resp_code", ",", "resp", "=", "self", ".", "_request", "(", "msg", ",", "codec", ")", "return", "codec", ".", "decode_get", "(", "robj", ",", "resp", ")" ]
Serialize get request and deserialize response
[ "Serialize", "get", "request", "and", "deserialize", "response" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L149-L160
train
basho/riak-python-client
riak/transports/tcp/transport.py
TcpTransport.ts_stream_keys
def ts_stream_keys(self, table, timeout=None): """ Streams keys from a timeseries table, returning an iterator that yields lists of keys. """ msg_code = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ codec = self._get_codec(msg_code) msg = codec.encode_timeseries_listkeysreq(table, timeout) self._send_msg(msg.msg_code, msg.data) return PbufTsKeyStream(self, codec, self._ts_convert_timestamp)
python
def ts_stream_keys(self, table, timeout=None): """ Streams keys from a timeseries table, returning an iterator that yields lists of keys. """ msg_code = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ codec = self._get_codec(msg_code) msg = codec.encode_timeseries_listkeysreq(table, timeout) self._send_msg(msg.msg_code, msg.data) return PbufTsKeyStream(self, codec, self._ts_convert_timestamp)
[ "def", "ts_stream_keys", "(", "self", ",", "table", ",", "timeout", "=", "None", ")", ":", "msg_code", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_TS_LIST_KEYS_REQ", "codec", "=", "self", ".", "_get_codec", "(", "msg_code", ")", "msg", "=", "codec", ".", "encode_timeseries_listkeysreq", "(", "table", ",", "timeout", ")", "self", ".", "_send_msg", "(", "msg", ".", "msg_code", ",", "msg", ".", "data", ")", "return", "PbufTsKeyStream", "(", "self", ",", "codec", ",", "self", ".", "_ts_convert_timestamp", ")" ]
Streams keys from a timeseries table, returning an iterator that yields lists of keys.
[ "Streams", "keys", "from", "a", "timeseries", "table", "returning", "an", "iterator", "that", "yields", "lists", "of", "keys", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L212-L221
train
basho/riak-python-client
riak/transports/tcp/transport.py
TcpTransport.get_keys
def get_keys(self, bucket, timeout=None): """ Lists all keys within a bucket. """ msg_code = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ codec = self._get_codec(msg_code) stream = self.stream_keys(bucket, timeout=timeout) return codec.decode_get_keys(stream)
python
def get_keys(self, bucket, timeout=None): """ Lists all keys within a bucket. """ msg_code = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ codec = self._get_codec(msg_code) stream = self.stream_keys(bucket, timeout=timeout) return codec.decode_get_keys(stream)
[ "def", "get_keys", "(", "self", ",", "bucket", ",", "timeout", "=", "None", ")", ":", "msg_code", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_LIST_KEYS_REQ", "codec", "=", "self", ".", "_get_codec", "(", "msg_code", ")", "stream", "=", "self", ".", "stream_keys", "(", "bucket", ",", "timeout", "=", "timeout", ")", "return", "codec", ".", "decode_get_keys", "(", "stream", ")" ]
Lists all keys within a bucket.
[ "Lists", "all", "keys", "within", "a", "bucket", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L231-L238
train
basho/riak-python-client
riak/transports/tcp/transport.py
TcpTransport.stream_keys
def stream_keys(self, bucket, timeout=None): """ Streams keys from a bucket, returning an iterator that yields lists of keys. """ msg_code = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ codec = self._get_codec(msg_code) msg = codec.encode_stream_keys(bucket, timeout) self._send_msg(msg.msg_code, msg.data) return PbufKeyStream(self, codec)
python
def stream_keys(self, bucket, timeout=None): """ Streams keys from a bucket, returning an iterator that yields lists of keys. """ msg_code = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ codec = self._get_codec(msg_code) msg = codec.encode_stream_keys(bucket, timeout) self._send_msg(msg.msg_code, msg.data) return PbufKeyStream(self, codec)
[ "def", "stream_keys", "(", "self", ",", "bucket", ",", "timeout", "=", "None", ")", ":", "msg_code", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_LIST_KEYS_REQ", "codec", "=", "self", ".", "_get_codec", "(", "msg_code", ")", "msg", "=", "codec", ".", "encode_stream_keys", "(", "bucket", ",", "timeout", ")", "self", ".", "_send_msg", "(", "msg", ".", "msg_code", ",", "msg", ".", "data", ")", "return", "PbufKeyStream", "(", "self", ",", "codec", ")" ]
Streams keys from a bucket, returning an iterator that yields lists of keys.
[ "Streams", "keys", "from", "a", "bucket", "returning", "an", "iterator", "that", "yields", "lists", "of", "keys", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L240-L249
train
basho/riak-python-client
riak/transports/tcp/transport.py
TcpTransport.get_buckets
def get_buckets(self, bucket_type=None, timeout=None): """ Serialize bucket listing request and deserialize response """ msg_code = riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ codec = self._get_codec(msg_code) msg = codec.encode_get_buckets(bucket_type, timeout, streaming=False) resp_code, resp = self._request(msg, codec) return resp.buckets
python
def get_buckets(self, bucket_type=None, timeout=None): """ Serialize bucket listing request and deserialize response """ msg_code = riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ codec = self._get_codec(msg_code) msg = codec.encode_get_buckets(bucket_type, timeout, streaming=False) resp_code, resp = self._request(msg, codec) return resp.buckets
[ "def", "get_buckets", "(", "self", ",", "bucket_type", "=", "None", ",", "timeout", "=", "None", ")", ":", "msg_code", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_LIST_BUCKETS_REQ", "codec", "=", "self", ".", "_get_codec", "(", "msg_code", ")", "msg", "=", "codec", ".", "encode_get_buckets", "(", "bucket_type", ",", "timeout", ",", "streaming", "=", "False", ")", "resp_code", ",", "resp", "=", "self", ".", "_request", "(", "msg", ",", "codec", ")", "return", "resp", ".", "buckets" ]
Serialize bucket listing request and deserialize response
[ "Serialize", "bucket", "listing", "request", "and", "deserialize", "response" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L251-L260
train
basho/riak-python-client
riak/transports/tcp/transport.py
TcpTransport.get_bucket_props
def get_bucket_props(self, bucket): """ Serialize bucket property request and deserialize response """ msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_REQ codec = self._get_codec(msg_code) msg = codec.encode_get_bucket_props(bucket) resp_code, resp = self._request(msg, codec) return codec.decode_bucket_props(resp.props)
python
def get_bucket_props(self, bucket): """ Serialize bucket property request and deserialize response """ msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_REQ codec = self._get_codec(msg_code) msg = codec.encode_get_bucket_props(bucket) resp_code, resp = self._request(msg, codec) return codec.decode_bucket_props(resp.props)
[ "def", "get_bucket_props", "(", "self", ",", "bucket", ")", ":", "msg_code", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_GET_BUCKET_REQ", "codec", "=", "self", ".", "_get_codec", "(", "msg_code", ")", "msg", "=", "codec", ".", "encode_get_bucket_props", "(", "bucket", ")", "resp_code", ",", "resp", "=", "self", ".", "_request", "(", "msg", ",", "codec", ")", "return", "codec", ".", "decode_bucket_props", "(", "resp", ".", "props", ")" ]
Serialize bucket property request and deserialize response
[ "Serialize", "bucket", "property", "request", "and", "deserialize", "response" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L276-L284
train
basho/riak-python-client
riak/transports/tcp/transport.py
TcpTransport.set_bucket_props
def set_bucket_props(self, bucket, props): """ Serialize set bucket property request and deserialize response """ if not self.pb_all_bucket_props(): for key in props: if key not in ('n_val', 'allow_mult'): raise NotImplementedError('Server only supports n_val and ' 'allow_mult properties over PBC') msg_code = riak.pb.messages.MSG_CODE_SET_BUCKET_REQ codec = self._get_codec(msg_code) msg = codec.encode_set_bucket_props(bucket, props) resp_code, resp = self._request(msg, codec) return True
python
def set_bucket_props(self, bucket, props): """ Serialize set bucket property request and deserialize response """ if not self.pb_all_bucket_props(): for key in props: if key not in ('n_val', 'allow_mult'): raise NotImplementedError('Server only supports n_val and ' 'allow_mult properties over PBC') msg_code = riak.pb.messages.MSG_CODE_SET_BUCKET_REQ codec = self._get_codec(msg_code) msg = codec.encode_set_bucket_props(bucket, props) resp_code, resp = self._request(msg, codec) return True
[ "def", "set_bucket_props", "(", "self", ",", "bucket", ",", "props", ")", ":", "if", "not", "self", ".", "pb_all_bucket_props", "(", ")", ":", "for", "key", "in", "props", ":", "if", "key", "not", "in", "(", "'n_val'", ",", "'allow_mult'", ")", ":", "raise", "NotImplementedError", "(", "'Server only supports n_val and '", "'allow_mult properties over PBC'", ")", "msg_code", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_SET_BUCKET_REQ", "codec", "=", "self", ".", "_get_codec", "(", "msg_code", ")", "msg", "=", "codec", ".", "encode_set_bucket_props", "(", "bucket", ",", "props", ")", "resp_code", ",", "resp", "=", "self", ".", "_request", "(", "msg", ",", "codec", ")", "return", "True" ]
Serialize set bucket property request and deserialize response
[ "Serialize", "set", "bucket", "property", "request", "and", "deserialize", "response" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L286-L299
train
basho/riak-python-client
riak/transports/tcp/transport.py
TcpTransport.clear_bucket_props
def clear_bucket_props(self, bucket): """ Clear bucket properties, resetting them to their defaults """ if not self.pb_clear_bucket_props(): return False msg_code = riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ codec = self._get_codec(msg_code) msg = codec.encode_clear_bucket_props(bucket) self._request(msg, codec) return True
python
def clear_bucket_props(self, bucket): """ Clear bucket properties, resetting them to their defaults """ if not self.pb_clear_bucket_props(): return False msg_code = riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ codec = self._get_codec(msg_code) msg = codec.encode_clear_bucket_props(bucket) self._request(msg, codec) return True
[ "def", "clear_bucket_props", "(", "self", ",", "bucket", ")", ":", "if", "not", "self", ".", "pb_clear_bucket_props", "(", ")", ":", "return", "False", "msg_code", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_RESET_BUCKET_REQ", "codec", "=", "self", ".", "_get_codec", "(", "msg_code", ")", "msg", "=", "codec", ".", "encode_clear_bucket_props", "(", "bucket", ")", "self", ".", "_request", "(", "msg", ",", "codec", ")", "return", "True" ]
Clear bucket properties, resetting them to their defaults
[ "Clear", "bucket", "properties", "resetting", "them", "to", "their", "defaults" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L301-L311
train
basho/riak-python-client
riak/transports/tcp/transport.py
TcpTransport.get_bucket_type_props
def get_bucket_type_props(self, bucket_type): """ Fetch bucket-type properties """ self._check_bucket_types(bucket_type) msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ codec = self._get_codec(msg_code) msg = codec.encode_get_bucket_type_props(bucket_type) resp_code, resp = self._request(msg, codec) return codec.decode_bucket_props(resp.props)
python
def get_bucket_type_props(self, bucket_type): """ Fetch bucket-type properties """ self._check_bucket_types(bucket_type) msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ codec = self._get_codec(msg_code) msg = codec.encode_get_bucket_type_props(bucket_type) resp_code, resp = self._request(msg, codec) return codec.decode_bucket_props(resp.props)
[ "def", "get_bucket_type_props", "(", "self", ",", "bucket_type", ")", ":", "self", ".", "_check_bucket_types", "(", "bucket_type", ")", "msg_code", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_GET_BUCKET_TYPE_REQ", "codec", "=", "self", ".", "_get_codec", "(", "msg_code", ")", "msg", "=", "codec", ".", "encode_get_bucket_type_props", "(", "bucket_type", ")", "resp_code", ",", "resp", "=", "self", ".", "_request", "(", "msg", ",", "codec", ")", "return", "codec", ".", "decode_bucket_props", "(", "resp", ".", "props", ")" ]
Fetch bucket-type properties
[ "Fetch", "bucket", "-", "type", "properties" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L313-L322
train
basho/riak-python-client
riak/transports/tcp/transport.py
TcpTransport.set_bucket_type_props
def set_bucket_type_props(self, bucket_type, props): """ Set bucket-type properties """ self._check_bucket_types(bucket_type) msg_code = riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ codec = self._get_codec(msg_code) msg = codec.encode_set_bucket_type_props(bucket_type, props) resp_code, resp = self._request(msg, codec) return True
python
def set_bucket_type_props(self, bucket_type, props): """ Set bucket-type properties """ self._check_bucket_types(bucket_type) msg_code = riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ codec = self._get_codec(msg_code) msg = codec.encode_set_bucket_type_props(bucket_type, props) resp_code, resp = self._request(msg, codec) return True
[ "def", "set_bucket_type_props", "(", "self", ",", "bucket_type", ",", "props", ")", ":", "self", ".", "_check_bucket_types", "(", "bucket_type", ")", "msg_code", "=", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_SET_BUCKET_TYPE_REQ", "codec", "=", "self", ".", "_get_codec", "(", "msg_code", ")", "msg", "=", "codec", ".", "encode_set_bucket_type_props", "(", "bucket_type", ",", "props", ")", "resp_code", ",", "resp", "=", "self", ".", "_request", "(", "msg", ",", "codec", ")", "return", "True" ]
Set bucket-type properties
[ "Set", "bucket", "-", "type", "properties" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L324-L333
train
basho/riak-python-client
riak/benchmark.py
print_report
def print_report(label, user, system, real): """ Prints the report of one step of a benchmark. """ print("{:<12s} {:12f} {:12f} ( {:12f} )".format(label, user, system, real))
python
def print_report(label, user, system, real): """ Prints the report of one step of a benchmark. """ print("{:<12s} {:12f} {:12f} ( {:12f} )".format(label, user, system, real))
[ "def", "print_report", "(", "label", ",", "user", ",", "system", ",", "real", ")", ":", "print", "(", "\"{:<12s} {:12f} {:12f} ( {:12f} )\"", ".", "format", "(", "label", ",", "user", ",", "system", ",", "real", ")", ")" ]
Prints the report of one step of a benchmark.
[ "Prints", "the", "report", "of", "one", "step", "of", "a", "benchmark", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/benchmark.py#L134-L141
train
basho/riak-python-client
riak/benchmark.py
Benchmark.next
def next(self): """ Runs the next iteration of the benchmark. """ if self.count == 0: raise StopIteration elif self.count > 1: print_rehearsal_header() else: if self.rehearse: gc.collect() print("-" * 59) print() print_header() self.count -= 1 return self
python
def next(self): """ Runs the next iteration of the benchmark. """ if self.count == 0: raise StopIteration elif self.count > 1: print_rehearsal_header() else: if self.rehearse: gc.collect() print("-" * 59) print() print_header() self.count -= 1 return self
[ "def", "next", "(", "self", ")", ":", "if", "self", ".", "count", "==", "0", ":", "raise", "StopIteration", "elif", "self", ".", "count", ">", "1", ":", "print_rehearsal_header", "(", ")", "else", ":", "if", "self", ".", "rehearse", ":", "gc", ".", "collect", "(", ")", "print", "(", "\"-\"", "*", "59", ")", "print", "(", ")", "print_header", "(", ")", "self", ".", "count", "-=", "1", "return", "self" ]
Runs the next iteration of the benchmark.
[ "Runs", "the", "next", "iteration", "of", "the", "benchmark", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/benchmark.py#L96-L112
train
basho/riak-python-client
riak/mapreduce.py
RiakMapReduce.add_object
def add_object(self, obj): """ Adds a RiakObject to the inputs. :param obj: the object to add :type obj: RiakObject :rtype: :class:`RiakMapReduce` """ return self.add_bucket_key_data(obj._bucket._name, obj._key, None)
python
def add_object(self, obj): """ Adds a RiakObject to the inputs. :param obj: the object to add :type obj: RiakObject :rtype: :class:`RiakMapReduce` """ return self.add_bucket_key_data(obj._bucket._name, obj._key, None)
[ "def", "add_object", "(", "self", ",", "obj", ")", ":", "return", "self", ".", "add_bucket_key_data", "(", "obj", ".", "_bucket", ".", "_name", ",", "obj", ".", "_key", ",", "None", ")" ]
Adds a RiakObject to the inputs. :param obj: the object to add :type obj: RiakObject :rtype: :class:`RiakMapReduce`
[ "Adds", "a", "RiakObject", "to", "the", "inputs", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L77-L85
train
basho/riak-python-client
riak/mapreduce.py
RiakMapReduce.add_bucket
def add_bucket(self, bucket, bucket_type=None): """ Adds all keys in a bucket to the inputs. :param bucket: the bucket :type bucket: string :param bucket_type: Optional name of a bucket type :type bucket_type: string, None :rtype: :class:`RiakMapReduce` """ if not riak.disable_list_exceptions: raise riak.ListError() self._input_mode = 'bucket' if isinstance(bucket, riak.RiakBucket): if bucket.bucket_type.is_default(): self._inputs = {'bucket': bucket.name} else: self._inputs = {'bucket': [bucket.bucket_type.name, bucket.name]} elif bucket_type is not None and bucket_type != "default": self._inputs = {'bucket': [bucket_type, bucket]} else: self._inputs = {'bucket': bucket} return self
python
def add_bucket(self, bucket, bucket_type=None): """ Adds all keys in a bucket to the inputs. :param bucket: the bucket :type bucket: string :param bucket_type: Optional name of a bucket type :type bucket_type: string, None :rtype: :class:`RiakMapReduce` """ if not riak.disable_list_exceptions: raise riak.ListError() self._input_mode = 'bucket' if isinstance(bucket, riak.RiakBucket): if bucket.bucket_type.is_default(): self._inputs = {'bucket': bucket.name} else: self._inputs = {'bucket': [bucket.bucket_type.name, bucket.name]} elif bucket_type is not None and bucket_type != "default": self._inputs = {'bucket': [bucket_type, bucket]} else: self._inputs = {'bucket': bucket} return self
[ "def", "add_bucket", "(", "self", ",", "bucket", ",", "bucket_type", "=", "None", ")", ":", "if", "not", "riak", ".", "disable_list_exceptions", ":", "raise", "riak", ".", "ListError", "(", ")", "self", ".", "_input_mode", "=", "'bucket'", "if", "isinstance", "(", "bucket", ",", "riak", ".", "RiakBucket", ")", ":", "if", "bucket", ".", "bucket_type", ".", "is_default", "(", ")", ":", "self", ".", "_inputs", "=", "{", "'bucket'", ":", "bucket", ".", "name", "}", "else", ":", "self", ".", "_inputs", "=", "{", "'bucket'", ":", "[", "bucket", ".", "bucket_type", ".", "name", ",", "bucket", ".", "name", "]", "}", "elif", "bucket_type", "is", "not", "None", "and", "bucket_type", "!=", "\"default\"", ":", "self", ".", "_inputs", "=", "{", "'bucket'", ":", "[", "bucket_type", ",", "bucket", "]", "}", "else", ":", "self", ".", "_inputs", "=", "{", "'bucket'", ":", "bucket", "}", "return", "self" ]
Adds all keys in a bucket to the inputs. :param bucket: the bucket :type bucket: string :param bucket_type: Optional name of a bucket type :type bucket_type: string, None :rtype: :class:`RiakMapReduce`
[ "Adds", "all", "keys", "in", "a", "bucket", "to", "the", "inputs", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L121-L144
train
basho/riak-python-client
riak/mapreduce.py
RiakMapReduce.add_key_filters
def add_key_filters(self, key_filters): """ Adds key filters to the inputs. :param key_filters: a list of filters :type key_filters: list :rtype: :class:`RiakMapReduce` """ if self._input_mode == 'query': raise ValueError('Key filters are not supported in a query.') self._key_filters.extend(key_filters) return self
python
def add_key_filters(self, key_filters): """ Adds key filters to the inputs. :param key_filters: a list of filters :type key_filters: list :rtype: :class:`RiakMapReduce` """ if self._input_mode == 'query': raise ValueError('Key filters are not supported in a query.') self._key_filters.extend(key_filters) return self
[ "def", "add_key_filters", "(", "self", ",", "key_filters", ")", ":", "if", "self", ".", "_input_mode", "==", "'query'", ":", "raise", "ValueError", "(", "'Key filters are not supported in a query.'", ")", "self", ".", "_key_filters", ".", "extend", "(", "key_filters", ")", "return", "self" ]
Adds key filters to the inputs. :param key_filters: a list of filters :type key_filters: list :rtype: :class:`RiakMapReduce`
[ "Adds", "key", "filters", "to", "the", "inputs", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L146-L158
train
basho/riak-python-client
riak/mapreduce.py
RiakMapReduce.add_key_filter
def add_key_filter(self, *args): """ Add a single key filter to the inputs. :param args: a filter :type args: list :rtype: :class:`RiakMapReduce` """ if self._input_mode == 'query': raise ValueError('Key filters are not supported in a query.') self._key_filters.append(args) return self
python
def add_key_filter(self, *args): """ Add a single key filter to the inputs. :param args: a filter :type args: list :rtype: :class:`RiakMapReduce` """ if self._input_mode == 'query': raise ValueError('Key filters are not supported in a query.') self._key_filters.append(args) return self
[ "def", "add_key_filter", "(", "self", ",", "*", "args", ")", ":", "if", "self", ".", "_input_mode", "==", "'query'", ":", "raise", "ValueError", "(", "'Key filters are not supported in a query.'", ")", "self", ".", "_key_filters", ".", "append", "(", "args", ")", "return", "self" ]
Add a single key filter to the inputs. :param args: a filter :type args: list :rtype: :class:`RiakMapReduce`
[ "Add", "a", "single", "key", "filter", "to", "the", "inputs", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L160-L172
train
basho/riak-python-client
riak/mapreduce.py
RiakMapReduce.reduce_sort
def reduce_sort(self, js_cmp=None, options=None): """ Adds the Javascript built-in ``Riak.reduceSort`` to the query as a reduce phase. :param js_cmp: A Javascript comparator function as specified by Array.sort() :type js_cmp: string :param options: phase options, containing 'language', 'keep' flag, and/or 'arg'. :type options: dict """ if options is None: options = dict() if js_cmp: options['arg'] = js_cmp return self.reduce("Riak.reduceSort", options=options)
python
def reduce_sort(self, js_cmp=None, options=None): """ Adds the Javascript built-in ``Riak.reduceSort`` to the query as a reduce phase. :param js_cmp: A Javascript comparator function as specified by Array.sort() :type js_cmp: string :param options: phase options, containing 'language', 'keep' flag, and/or 'arg'. :type options: dict """ if options is None: options = dict() if js_cmp: options['arg'] = js_cmp return self.reduce("Riak.reduceSort", options=options)
[ "def", "reduce_sort", "(", "self", ",", "js_cmp", "=", "None", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "dict", "(", ")", "if", "js_cmp", ":", "options", "[", "'arg'", "]", "=", "js_cmp", "return", "self", ".", "reduce", "(", "\"Riak.reduceSort\"", ",", "options", "=", "options", ")" ]
Adds the Javascript built-in ``Riak.reduceSort`` to the query as a reduce phase. :param js_cmp: A Javascript comparator function as specified by Array.sort() :type js_cmp: string :param options: phase options, containing 'language', 'keep' flag, and/or 'arg'. :type options: dict
[ "Adds", "the", "Javascript", "built", "-", "in", "Riak", ".", "reduceSort", "to", "the", "query", "as", "a", "reduce", "phase", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L448-L466
train
basho/riak-python-client
riak/mapreduce.py
RiakMapReduce.reduce_slice
def reduce_slice(self, start, end, options=None): """ Adds the Javascript built-in ``Riak.reduceSlice`` to the query as a reduce phase. :param start: the beginning of the slice :type start: integer :param end: the end of the slice :type end: integer :param options: phase options, containing 'language', 'keep' flag, and/or 'arg'. :type options: dict """ if options is None: options = dict() options['arg'] = [start, end] return self.reduce("Riak.reduceSlice", options=options)
python
def reduce_slice(self, start, end, options=None): """ Adds the Javascript built-in ``Riak.reduceSlice`` to the query as a reduce phase. :param start: the beginning of the slice :type start: integer :param end: the end of the slice :type end: integer :param options: phase options, containing 'language', 'keep' flag, and/or 'arg'. :type options: dict """ if options is None: options = dict() options['arg'] = [start, end] return self.reduce("Riak.reduceSlice", options=options)
[ "def", "reduce_slice", "(", "self", ",", "start", ",", "end", ",", "options", "=", "None", ")", ":", "if", "options", "is", "None", ":", "options", "=", "dict", "(", ")", "options", "[", "'arg'", "]", "=", "[", "start", ",", "end", "]", "return", "self", ".", "reduce", "(", "\"Riak.reduceSlice\"", ",", "options", "=", "options", ")" ]
Adds the Javascript built-in ``Riak.reduceSlice`` to the query as a reduce phase. :param start: the beginning of the slice :type start: integer :param end: the end of the slice :type end: integer :param options: phase options, containing 'language', 'keep' flag, and/or 'arg'. :type options: dict
[ "Adds", "the", "Javascript", "built", "-", "in", "Riak", ".", "reduceSlice", "to", "the", "query", "as", "a", "reduce", "phase", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L500-L517
train
basho/riak-python-client
riak/mapreduce.py
RiakMapReducePhase.to_array
def to_array(self): """ Convert the RiakMapReducePhase to a format that can be output into JSON. Used internally. :rtype: dict """ stepdef = {'keep': self._keep, 'language': self._language, 'arg': self._arg} if self._language == 'javascript': if isinstance(self._function, list): stepdef['bucket'] = self._function[0] stepdef['key'] = self._function[1] elif isinstance(self._function, string_types): if ("{" in self._function): stepdef['source'] = self._function else: stepdef['name'] = self._function elif (self._language == 'erlang' and isinstance(self._function, list)): stepdef['module'] = self._function[0] stepdef['function'] = self._function[1] elif (self._language == 'erlang' and isinstance(self._function, string_types)): stepdef['source'] = self._function return {self._type: stepdef}
python
def to_array(self): """ Convert the RiakMapReducePhase to a format that can be output into JSON. Used internally. :rtype: dict """ stepdef = {'keep': self._keep, 'language': self._language, 'arg': self._arg} if self._language == 'javascript': if isinstance(self._function, list): stepdef['bucket'] = self._function[0] stepdef['key'] = self._function[1] elif isinstance(self._function, string_types): if ("{" in self._function): stepdef['source'] = self._function else: stepdef['name'] = self._function elif (self._language == 'erlang' and isinstance(self._function, list)): stepdef['module'] = self._function[0] stepdef['function'] = self._function[1] elif (self._language == 'erlang' and isinstance(self._function, string_types)): stepdef['source'] = self._function return {self._type: stepdef}
[ "def", "to_array", "(", "self", ")", ":", "stepdef", "=", "{", "'keep'", ":", "self", ".", "_keep", ",", "'language'", ":", "self", ".", "_language", ",", "'arg'", ":", "self", ".", "_arg", "}", "if", "self", ".", "_language", "==", "'javascript'", ":", "if", "isinstance", "(", "self", ".", "_function", ",", "list", ")", ":", "stepdef", "[", "'bucket'", "]", "=", "self", ".", "_function", "[", "0", "]", "stepdef", "[", "'key'", "]", "=", "self", ".", "_function", "[", "1", "]", "elif", "isinstance", "(", "self", ".", "_function", ",", "string_types", ")", ":", "if", "(", "\"{\"", "in", "self", ".", "_function", ")", ":", "stepdef", "[", "'source'", "]", "=", "self", ".", "_function", "else", ":", "stepdef", "[", "'name'", "]", "=", "self", ".", "_function", "elif", "(", "self", ".", "_language", "==", "'erlang'", "and", "isinstance", "(", "self", ".", "_function", ",", "list", ")", ")", ":", "stepdef", "[", "'module'", "]", "=", "self", ".", "_function", "[", "0", "]", "stepdef", "[", "'function'", "]", "=", "self", ".", "_function", "[", "1", "]", "elif", "(", "self", ".", "_language", "==", "'erlang'", "and", "isinstance", "(", "self", ".", "_function", ",", "string_types", ")", ")", ":", "stepdef", "[", "'source'", "]", "=", "self", ".", "_function", "return", "{", "self", ".", "_type", ":", "stepdef", "}" ]
Convert the RiakMapReducePhase to a format that can be output into JSON. Used internally. :rtype: dict
[ "Convert", "the", "RiakMapReducePhase", "to", "a", "format", "that", "can", "be", "output", "into", "JSON", ".", "Used", "internally", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L569-L598
train
basho/riak-python-client
riak/mapreduce.py
RiakLinkPhase.to_array
def to_array(self): """ Convert the RiakLinkPhase to a format that can be output into JSON. Used internally. """ stepdef = {'bucket': self._bucket, 'tag': self._tag, 'keep': self._keep} return {'link': stepdef}
python
def to_array(self): """ Convert the RiakLinkPhase to a format that can be output into JSON. Used internally. """ stepdef = {'bucket': self._bucket, 'tag': self._tag, 'keep': self._keep} return {'link': stepdef}
[ "def", "to_array", "(", "self", ")", ":", "stepdef", "=", "{", "'bucket'", ":", "self", ".", "_bucket", ",", "'tag'", ":", "self", ".", "_tag", ",", "'keep'", ":", "self", ".", "_keep", "}", "return", "{", "'link'", ":", "stepdef", "}" ]
Convert the RiakLinkPhase to a format that can be output into JSON. Used internally.
[ "Convert", "the", "RiakLinkPhase", "to", "a", "format", "that", "can", "be", "output", "into", "JSON", ".", "Used", "internally", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L626-L634
train
basho/riak-python-client
riak/resolver.py
last_written_resolver
def last_written_resolver(riak_object): """ A conflict-resolution function that resolves by selecting the most recently-modified sibling by timestamp. :param riak_object: an object-in-conflict that will be resolved :type riak_object: :class:`RiakObject <riak.riak_object.RiakObject>` """ riak_object.siblings = [max(riak_object.siblings, key=lambda x: x.last_modified), ]
python
def last_written_resolver(riak_object): """ A conflict-resolution function that resolves by selecting the most recently-modified sibling by timestamp. :param riak_object: an object-in-conflict that will be resolved :type riak_object: :class:`RiakObject <riak.riak_object.RiakObject>` """ riak_object.siblings = [max(riak_object.siblings, key=lambda x: x.last_modified), ]
[ "def", "last_written_resolver", "(", "riak_object", ")", ":", "riak_object", ".", "siblings", "=", "[", "max", "(", "riak_object", ".", "siblings", ",", "key", "=", "lambda", "x", ":", "x", ".", "last_modified", ")", ",", "]" ]
A conflict-resolution function that resolves by selecting the most recently-modified sibling by timestamp. :param riak_object: an object-in-conflict that will be resolved :type riak_object: :class:`RiakObject <riak.riak_object.RiakObject>`
[ "A", "conflict", "-", "resolution", "function", "that", "resolves", "by", "selecting", "the", "most", "recently", "-", "modified", "sibling", "by", "timestamp", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/resolver.py#L31-L40
train
basho/riak-python-client
riak/transports/security.py
verify_cb
def verify_cb(conn, cert, errnum, depth, ok): """ The default OpenSSL certificate verification callback. """ if not ok: raise SecurityError("Could not verify CA certificate {0}" .format(cert.get_subject())) return ok
python
def verify_cb(conn, cert, errnum, depth, ok): """ The default OpenSSL certificate verification callback. """ if not ok: raise SecurityError("Could not verify CA certificate {0}" .format(cert.get_subject())) return ok
[ "def", "verify_cb", "(", "conn", ",", "cert", ",", "errnum", ",", "depth", ",", "ok", ")", ":", "if", "not", "ok", ":", "raise", "SecurityError", "(", "\"Could not verify CA certificate {0}\"", ".", "format", "(", "cert", ".", "get_subject", "(", ")", ")", ")", "return", "ok" ]
The default OpenSSL certificate verification callback.
[ "The", "default", "OpenSSL", "certificate", "verification", "callback", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/security.py#L27-L34
train
basho/riak-python-client
riak/client/index_page.py
IndexPage.next_page
def next_page(self, timeout=None, stream=None): """ Fetches the next page using the same parameters as the original query. Note that if streaming was used before, it will be used again unless overridden. :param stream: whether to enable streaming. `True` enables, `False` disables, `None` uses previous value. :type stream: boolean :param timeout: a timeout value in milliseconds, or 'infinity' :type timeout: int """ if not self.continuation: raise ValueError("Cannot get next index page, no continuation") if stream is not None: self.stream = stream args = {'bucket': self.bucket, 'index': self.index, 'startkey': self.startkey, 'endkey': self.endkey, 'return_terms': self.return_terms, 'max_results': self.max_results, 'continuation': self.continuation, 'timeout': timeout, 'term_regex': self.term_regex} if self.stream: return self.client.stream_index(**args) else: return self.client.get_index(**args)
python
def next_page(self, timeout=None, stream=None): """ Fetches the next page using the same parameters as the original query. Note that if streaming was used before, it will be used again unless overridden. :param stream: whether to enable streaming. `True` enables, `False` disables, `None` uses previous value. :type stream: boolean :param timeout: a timeout value in milliseconds, or 'infinity' :type timeout: int """ if not self.continuation: raise ValueError("Cannot get next index page, no continuation") if stream is not None: self.stream = stream args = {'bucket': self.bucket, 'index': self.index, 'startkey': self.startkey, 'endkey': self.endkey, 'return_terms': self.return_terms, 'max_results': self.max_results, 'continuation': self.continuation, 'timeout': timeout, 'term_regex': self.term_regex} if self.stream: return self.client.stream_index(**args) else: return self.client.get_index(**args)
[ "def", "next_page", "(", "self", ",", "timeout", "=", "None", ",", "stream", "=", "None", ")", ":", "if", "not", "self", ".", "continuation", ":", "raise", "ValueError", "(", "\"Cannot get next index page, no continuation\"", ")", "if", "stream", "is", "not", "None", ":", "self", ".", "stream", "=", "stream", "args", "=", "{", "'bucket'", ":", "self", ".", "bucket", ",", "'index'", ":", "self", ".", "index", ",", "'startkey'", ":", "self", ".", "startkey", ",", "'endkey'", ":", "self", ".", "endkey", ",", "'return_terms'", ":", "self", ".", "return_terms", ",", "'max_results'", ":", "self", ".", "max_results", ",", "'continuation'", ":", "self", ".", "continuation", ",", "'timeout'", ":", "timeout", ",", "'term_regex'", ":", "self", ".", "term_regex", "}", "if", "self", ".", "stream", ":", "return", "self", ".", "client", ".", "stream_index", "(", "*", "*", "args", ")", "else", ":", "return", "self", ".", "client", ".", "get_index", "(", "*", "*", "args", ")" ]
Fetches the next page using the same parameters as the original query. Note that if streaming was used before, it will be used again unless overridden. :param stream: whether to enable streaming. `True` enables, `False` disables, `None` uses previous value. :type stream: boolean :param timeout: a timeout value in milliseconds, or 'infinity' :type timeout: int
[ "Fetches", "the", "next", "page", "using", "the", "same", "parameters", "as", "the", "original", "query", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/index_page.py#L117-L150
train
basho/riak-python-client
riak/client/operations.py
_validate_timeout
def _validate_timeout(timeout, infinity_ok=False): """ Raises an exception if the given timeout is an invalid value. """ if timeout is None: return if timeout == 'infinity': if infinity_ok: return else: raise ValueError( 'timeout must be a positive integer ' '("infinity" is not valid)') if isinstance(timeout, six.integer_types) and timeout > 0: return raise ValueError('timeout must be a positive integer')
python
def _validate_timeout(timeout, infinity_ok=False): """ Raises an exception if the given timeout is an invalid value. """ if timeout is None: return if timeout == 'infinity': if infinity_ok: return else: raise ValueError( 'timeout must be a positive integer ' '("infinity" is not valid)') if isinstance(timeout, six.integer_types) and timeout > 0: return raise ValueError('timeout must be a positive integer')
[ "def", "_validate_timeout", "(", "timeout", ",", "infinity_ok", "=", "False", ")", ":", "if", "timeout", "is", "None", ":", "return", "if", "timeout", "==", "'infinity'", ":", "if", "infinity_ok", ":", "return", "else", ":", "raise", "ValueError", "(", "'timeout must be a positive integer '", "'(\"infinity\" is not valid)'", ")", "if", "isinstance", "(", "timeout", ",", "six", ".", "integer_types", ")", "and", "timeout", ">", "0", ":", "return", "raise", "ValueError", "(", "'timeout must be a positive integer'", ")" ]
Raises an exception if the given timeout is an invalid value.
[ "Raises", "an", "exception", "if", "the", "given", "timeout", "is", "an", "invalid", "value", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1270-L1288
train
basho/riak-python-client
riak/client/operations.py
RiakClientOperations.stream_buckets
def stream_buckets(self, bucket_type=None, timeout=None): """ Streams the list of buckets. This is a generator method that should be iterated over. .. warning:: Do not use this in production, as it requires traversing through all keys stored in a cluster. The caller should explicitly close the returned iterator, either using :func:`contextlib.closing` or calling ``close()`` explicitly. Consuming the entire iterator will also close the stream. If it does not, the associated connection might not be returned to the pool. Example:: from contextlib import closing # Using contextlib.closing with closing(client.stream_buckets()) as buckets: for bucket_list in buckets: do_something(bucket_list) # Explicit close() stream = client.stream_buckets() for bucket_list in stream: do_something(bucket_list) stream.close() :param bucket_type: the optional containing bucket type :type bucket_type: :class:`~riak.bucket.BucketType` :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator that yields lists of :class:`RiakBucket <riak.bucket.RiakBucket>` instances """ if not riak.disable_list_exceptions: raise ListError() _validate_timeout(timeout) if bucket_type: bucketfn = self._bucket_type_bucket_builder else: bucketfn = self._default_type_bucket_builder def make_op(transport): return transport.stream_buckets( bucket_type=bucket_type, timeout=timeout) for bucket_list in self._stream_with_retry(make_op): bucket_list = [bucketfn(bytes_to_str(name), bucket_type) for name in bucket_list] if len(bucket_list) > 0: yield bucket_list
python
def stream_buckets(self, bucket_type=None, timeout=None): """ Streams the list of buckets. This is a generator method that should be iterated over. .. warning:: Do not use this in production, as it requires traversing through all keys stored in a cluster. The caller should explicitly close the returned iterator, either using :func:`contextlib.closing` or calling ``close()`` explicitly. Consuming the entire iterator will also close the stream. If it does not, the associated connection might not be returned to the pool. Example:: from contextlib import closing # Using contextlib.closing with closing(client.stream_buckets()) as buckets: for bucket_list in buckets: do_something(bucket_list) # Explicit close() stream = client.stream_buckets() for bucket_list in stream: do_something(bucket_list) stream.close() :param bucket_type: the optional containing bucket type :type bucket_type: :class:`~riak.bucket.BucketType` :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator that yields lists of :class:`RiakBucket <riak.bucket.RiakBucket>` instances """ if not riak.disable_list_exceptions: raise ListError() _validate_timeout(timeout) if bucket_type: bucketfn = self._bucket_type_bucket_builder else: bucketfn = self._default_type_bucket_builder def make_op(transport): return transport.stream_buckets( bucket_type=bucket_type, timeout=timeout) for bucket_list in self._stream_with_retry(make_op): bucket_list = [bucketfn(bytes_to_str(name), bucket_type) for name in bucket_list] if len(bucket_list) > 0: yield bucket_list
[ "def", "stream_buckets", "(", "self", ",", "bucket_type", "=", "None", ",", "timeout", "=", "None", ")", ":", "if", "not", "riak", ".", "disable_list_exceptions", ":", "raise", "ListError", "(", ")", "_validate_timeout", "(", "timeout", ")", "if", "bucket_type", ":", "bucketfn", "=", "self", ".", "_bucket_type_bucket_builder", "else", ":", "bucketfn", "=", "self", ".", "_default_type_bucket_builder", "def", "make_op", "(", "transport", ")", ":", "return", "transport", ".", "stream_buckets", "(", "bucket_type", "=", "bucket_type", ",", "timeout", "=", "timeout", ")", "for", "bucket_list", "in", "self", ".", "_stream_with_retry", "(", "make_op", ")", ":", "bucket_list", "=", "[", "bucketfn", "(", "bytes_to_str", "(", "name", ")", ",", "bucket_type", ")", "for", "name", "in", "bucket_list", "]", "if", "len", "(", "bucket_list", ")", ">", "0", ":", "yield", "bucket_list" ]
Streams the list of buckets. This is a generator method that should be iterated over. .. warning:: Do not use this in production, as it requires traversing through all keys stored in a cluster. The caller should explicitly close the returned iterator, either using :func:`contextlib.closing` or calling ``close()`` explicitly. Consuming the entire iterator will also close the stream. If it does not, the associated connection might not be returned to the pool. Example:: from contextlib import closing # Using contextlib.closing with closing(client.stream_buckets()) as buckets: for bucket_list in buckets: do_something(bucket_list) # Explicit close() stream = client.stream_buckets() for bucket_list in stream: do_something(bucket_list) stream.close() :param bucket_type: the optional containing bucket type :type bucket_type: :class:`~riak.bucket.BucketType` :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator that yields lists of :class:`RiakBucket <riak.bucket.RiakBucket>` instances
[ "Streams", "the", "list", "of", "buckets", ".", "This", "is", "a", "generator", "method", "that", "should", "be", "iterated", "over", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L72-L125
train
basho/riak-python-client
riak/client/operations.py
RiakClientOperations.stream_index
def stream_index(self, bucket, index, startkey, endkey=None, return_terms=None, max_results=None, continuation=None, timeout=None, term_regex=None): """ Queries a secondary index, streaming matching keys through an iterator. The caller should explicitly close the returned iterator, either using :func:`contextlib.closing` or calling ``close()`` explicitly. Consuming the entire iterator will also close the stream. If it does not, the associated connection might not be returned to the pool. Example:: from contextlib import closing # Using contextlib.closing with closing(client.stream_index(mybucket, 'name_bin', 'Smith')) as index: for key in index: do_something(key) # Explicit close() stream = client.stream_index(mybucket, 'name_bin', 'Smith') for key in stream: do_something(key) stream.close() :param bucket: the bucket whose index will be queried :type bucket: RiakBucket :param index: the index to query :type index: string :param startkey: the sole key to query, or beginning of the query range :type startkey: string, integer :param endkey: the end of the query range (optional if equality) :type endkey: string, integer :param return_terms: whether to include the secondary index value :type return_terms: boolean :param max_results: the maximum number of results to return (page size) :type max_results: integer :param continuation: the opaque continuation returned from a previous paginated request :type continuation: string :param timeout: a timeout value in milliseconds, or 'infinity' :type timeout: int :param term_regex: a regular expression used to filter index terms :type term_regex: string :rtype: :class:`~riak.client.index_page.IndexPage` """ # TODO FUTURE: implement "retry on connection closed" # as in stream_mapred _validate_timeout(timeout, infinity_ok=True) page = IndexPage(self, bucket, index, startkey, endkey, return_terms, max_results, term_regex) page.stream = True resource = self._acquire() transport = resource.object page.results = transport.stream_index( 
bucket, index, startkey, endkey, return_terms=return_terms, max_results=max_results, continuation=continuation, timeout=timeout, term_regex=term_regex) page.results.attach(resource) return page
python
def stream_index(self, bucket, index, startkey, endkey=None, return_terms=None, max_results=None, continuation=None, timeout=None, term_regex=None): """ Queries a secondary index, streaming matching keys through an iterator. The caller should explicitly close the returned iterator, either using :func:`contextlib.closing` or calling ``close()`` explicitly. Consuming the entire iterator will also close the stream. If it does not, the associated connection might not be returned to the pool. Example:: from contextlib import closing # Using contextlib.closing with closing(client.stream_index(mybucket, 'name_bin', 'Smith')) as index: for key in index: do_something(key) # Explicit close() stream = client.stream_index(mybucket, 'name_bin', 'Smith') for key in stream: do_something(key) stream.close() :param bucket: the bucket whose index will be queried :type bucket: RiakBucket :param index: the index to query :type index: string :param startkey: the sole key to query, or beginning of the query range :type startkey: string, integer :param endkey: the end of the query range (optional if equality) :type endkey: string, integer :param return_terms: whether to include the secondary index value :type return_terms: boolean :param max_results: the maximum number of results to return (page size) :type max_results: integer :param continuation: the opaque continuation returned from a previous paginated request :type continuation: string :param timeout: a timeout value in milliseconds, or 'infinity' :type timeout: int :param term_regex: a regular expression used to filter index terms :type term_regex: string :rtype: :class:`~riak.client.index_page.IndexPage` """ # TODO FUTURE: implement "retry on connection closed" # as in stream_mapred _validate_timeout(timeout, infinity_ok=True) page = IndexPage(self, bucket, index, startkey, endkey, return_terms, max_results, term_regex) page.stream = True resource = self._acquire() transport = resource.object page.results = transport.stream_index( 
bucket, index, startkey, endkey, return_terms=return_terms, max_results=max_results, continuation=continuation, timeout=timeout, term_regex=term_regex) page.results.attach(resource) return page
[ "def", "stream_index", "(", "self", ",", "bucket", ",", "index", ",", "startkey", ",", "endkey", "=", "None", ",", "return_terms", "=", "None", ",", "max_results", "=", "None", ",", "continuation", "=", "None", ",", "timeout", "=", "None", ",", "term_regex", "=", "None", ")", ":", "# TODO FUTURE: implement \"retry on connection closed\"", "# as in stream_mapred", "_validate_timeout", "(", "timeout", ",", "infinity_ok", "=", "True", ")", "page", "=", "IndexPage", "(", "self", ",", "bucket", ",", "index", ",", "startkey", ",", "endkey", ",", "return_terms", ",", "max_results", ",", "term_regex", ")", "page", ".", "stream", "=", "True", "resource", "=", "self", ".", "_acquire", "(", ")", "transport", "=", "resource", ".", "object", "page", ".", "results", "=", "transport", ".", "stream_index", "(", "bucket", ",", "index", ",", "startkey", ",", "endkey", ",", "return_terms", "=", "return_terms", ",", "max_results", "=", "max_results", ",", "continuation", "=", "continuation", ",", "timeout", "=", "timeout", ",", "term_regex", "=", "term_regex", ")", "page", ".", "results", ".", "attach", "(", "resource", ")", "return", "page" ]
Queries a secondary index, streaming matching keys through an iterator. The caller should explicitly close the returned iterator, either using :func:`contextlib.closing` or calling ``close()`` explicitly. Consuming the entire iterator will also close the stream. If it does not, the associated connection might not be returned to the pool. Example:: from contextlib import closing # Using contextlib.closing with closing(client.stream_index(mybucket, 'name_bin', 'Smith')) as index: for key in index: do_something(key) # Explicit close() stream = client.stream_index(mybucket, 'name_bin', 'Smith') for key in stream: do_something(key) stream.close() :param bucket: the bucket whose index will be queried :type bucket: RiakBucket :param index: the index to query :type index: string :param startkey: the sole key to query, or beginning of the query range :type startkey: string, integer :param endkey: the end of the query range (optional if equality) :type endkey: string, integer :param return_terms: whether to include the secondary index value :type return_terms: boolean :param max_results: the maximum number of results to return (page size) :type max_results: integer :param continuation: the opaque continuation returned from a previous paginated request :type continuation: string :param timeout: a timeout value in milliseconds, or 'infinity' :type timeout: int :param term_regex: a regular expression used to filter index terms :type term_regex: string :rtype: :class:`~riak.client.index_page.IndexPage`
[ "Queries", "a", "secondary", "index", "streaming", "matching", "keys", "through", "an", "iterator", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L238-L301
train
basho/riak-python-client
riak/client/operations.py
RiakClientOperations.stream_keys
def stream_keys(self, bucket, timeout=None): """ Lists all keys in a bucket via a stream. This is a generator method which should be iterated over. .. warning:: Do not use this in production, as it requires traversing through all keys stored in a cluster. The caller should explicitly close the returned iterator, either using :func:`contextlib.closing` or calling ``close()`` explicitly. Consuming the entire iterator will also close the stream. If it does not, the associated connection might not be returned to the pool. Example:: from contextlib import closing # Using contextlib.closing with closing(client.stream_keys(mybucket)) as keys: for key_list in keys: do_something(key_list) # Explicit close() stream = client.stream_keys(mybucket) for key_list in stream: do_something(key_list) stream.close() :param bucket: the bucket whose properties will be set :type bucket: RiakBucket :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator """ if not riak.disable_list_exceptions: raise ListError() _validate_timeout(timeout) def make_op(transport): return transport.stream_keys(bucket, timeout=timeout) for keylist in self._stream_with_retry(make_op): if len(keylist) > 0: if six.PY2: yield keylist else: yield [bytes_to_str(item) for item in keylist]
python
def stream_keys(self, bucket, timeout=None): """ Lists all keys in a bucket via a stream. This is a generator method which should be iterated over. .. warning:: Do not use this in production, as it requires traversing through all keys stored in a cluster. The caller should explicitly close the returned iterator, either using :func:`contextlib.closing` or calling ``close()`` explicitly. Consuming the entire iterator will also close the stream. If it does not, the associated connection might not be returned to the pool. Example:: from contextlib import closing # Using contextlib.closing with closing(client.stream_keys(mybucket)) as keys: for key_list in keys: do_something(key_list) # Explicit close() stream = client.stream_keys(mybucket) for key_list in stream: do_something(key_list) stream.close() :param bucket: the bucket whose properties will be set :type bucket: RiakBucket :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator """ if not riak.disable_list_exceptions: raise ListError() _validate_timeout(timeout) def make_op(transport): return transport.stream_keys(bucket, timeout=timeout) for keylist in self._stream_with_retry(make_op): if len(keylist) > 0: if six.PY2: yield keylist else: yield [bytes_to_str(item) for item in keylist]
[ "def", "stream_keys", "(", "self", ",", "bucket", ",", "timeout", "=", "None", ")", ":", "if", "not", "riak", ".", "disable_list_exceptions", ":", "raise", "ListError", "(", ")", "_validate_timeout", "(", "timeout", ")", "def", "make_op", "(", "transport", ")", ":", "return", "transport", ".", "stream_keys", "(", "bucket", ",", "timeout", "=", "timeout", ")", "for", "keylist", "in", "self", ".", "_stream_with_retry", "(", "make_op", ")", ":", "if", "len", "(", "keylist", ")", ">", "0", ":", "if", "six", ".", "PY2", ":", "yield", "keylist", "else", ":", "yield", "[", "bytes_to_str", "(", "item", ")", "for", "item", "in", "keylist", "]" ]
Lists all keys in a bucket via a stream. This is a generator method which should be iterated over. .. warning:: Do not use this in production, as it requires traversing through all keys stored in a cluster. The caller should explicitly close the returned iterator, either using :func:`contextlib.closing` or calling ``close()`` explicitly. Consuming the entire iterator will also close the stream. If it does not, the associated connection might not be returned to the pool. Example:: from contextlib import closing # Using contextlib.closing with closing(client.stream_keys(mybucket)) as keys: for key_list in keys: do_something(key_list) # Explicit close() stream = client.stream_keys(mybucket) for key_list in stream: do_something(key_list) stream.close() :param bucket: the bucket whose properties will be set :type bucket: RiakBucket :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator
[ "Lists", "all", "keys", "in", "a", "bucket", "via", "a", "stream", ".", "This", "is", "a", "generator", "method", "which", "should", "be", "iterated", "over", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L484-L530
train
basho/riak-python-client
riak/client/operations.py
RiakClientOperations.ts_stream_keys
def ts_stream_keys(self, table, timeout=None): """ Lists all keys in a time series table via a stream. This is a generator method which should be iterated over. The caller should explicitly close the returned iterator, either using :func:`contextlib.closing` or calling ``close()`` explicitly. Consuming the entire iterator will also close the stream. If it does not, the associated connection might not be returned to the pool. Example:: from contextlib import closing # Using contextlib.closing with closing(client.ts_stream_keys(mytable)) as keys: for key_list in keys: do_something(key_list) # Explicit close() stream = client.ts_stream_keys(mytable) for key_list in stream: do_something(key_list) stream.close() :param table: the table from which to stream keys :type table: string or :class:`Table <riak.table.Table>` :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator """ if not riak.disable_list_exceptions: raise ListError() t = table if isinstance(t, six.string_types): t = Table(self, table) _validate_timeout(timeout) resource = self._acquire() transport = resource.object stream = transport.ts_stream_keys(t, timeout) stream.attach(resource) try: for keylist in stream: if len(keylist) > 0: yield keylist finally: stream.close()
python
def ts_stream_keys(self, table, timeout=None): """ Lists all keys in a time series table via a stream. This is a generator method which should be iterated over. The caller should explicitly close the returned iterator, either using :func:`contextlib.closing` or calling ``close()`` explicitly. Consuming the entire iterator will also close the stream. If it does not, the associated connection might not be returned to the pool. Example:: from contextlib import closing # Using contextlib.closing with closing(client.ts_stream_keys(mytable)) as keys: for key_list in keys: do_something(key_list) # Explicit close() stream = client.ts_stream_keys(mytable) for key_list in stream: do_something(key_list) stream.close() :param table: the table from which to stream keys :type table: string or :class:`Table <riak.table.Table>` :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator """ if not riak.disable_list_exceptions: raise ListError() t = table if isinstance(t, six.string_types): t = Table(self, table) _validate_timeout(timeout) resource = self._acquire() transport = resource.object stream = transport.ts_stream_keys(t, timeout) stream.attach(resource) try: for keylist in stream: if len(keylist) > 0: yield keylist finally: stream.close()
[ "def", "ts_stream_keys", "(", "self", ",", "table", ",", "timeout", "=", "None", ")", ":", "if", "not", "riak", ".", "disable_list_exceptions", ":", "raise", "ListError", "(", ")", "t", "=", "table", "if", "isinstance", "(", "t", ",", "six", ".", "string_types", ")", ":", "t", "=", "Table", "(", "self", ",", "table", ")", "_validate_timeout", "(", "timeout", ")", "resource", "=", "self", ".", "_acquire", "(", ")", "transport", "=", "resource", ".", "object", "stream", "=", "transport", ".", "ts_stream_keys", "(", "t", ",", "timeout", ")", "stream", ".", "attach", "(", "resource", ")", "try", ":", "for", "keylist", "in", "stream", ":", "if", "len", "(", "keylist", ")", ">", "0", ":", "yield", "keylist", "finally", ":", "stream", ".", "close", "(", ")" ]
Lists all keys in a time series table via a stream. This is a generator method which should be iterated over. The caller should explicitly close the returned iterator, either using :func:`contextlib.closing` or calling ``close()`` explicitly. Consuming the entire iterator will also close the stream. If it does not, the associated connection might not be returned to the pool. Example:: from contextlib import closing # Using contextlib.closing with closing(client.ts_stream_keys(mytable)) as keys: for key_list in keys: do_something(key_list) # Explicit close() stream = client.ts_stream_keys(mytable) for key_list in stream: do_something(key_list) stream.close() :param table: the table from which to stream keys :type table: string or :class:`Table <riak.table.Table>` :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator
[ "Lists", "all", "keys", "in", "a", "time", "series", "table", "via", "a", "stream", ".", "This", "is", "a", "generator", "method", "which", "should", "be", "iterated", "over", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L665-L713
train
basho/riak-python-client
riak/client/operations.py
RiakClientOperations.multiget
def multiget(self, pairs, **params): """Fetches many keys in parallel via threads. :param pairs: list of bucket_type/bucket/key tuple triples :type pairs: list :param params: additional request flags, e.g. r, pr :type params: dict :rtype: list of :class:`RiakObjects <riak.riak_object.RiakObject>`, :class:`Datatypes <riak.datatypes.Datatype>`, or tuples of bucket_type, bucket, key, and the exception raised on fetch """ if self._multiget_pool: params['pool'] = self._multiget_pool return riak.client.multi.multiget(self, pairs, **params)
python
def multiget(self, pairs, **params): """Fetches many keys in parallel via threads. :param pairs: list of bucket_type/bucket/key tuple triples :type pairs: list :param params: additional request flags, e.g. r, pr :type params: dict :rtype: list of :class:`RiakObjects <riak.riak_object.RiakObject>`, :class:`Datatypes <riak.datatypes.Datatype>`, or tuples of bucket_type, bucket, key, and the exception raised on fetch """ if self._multiget_pool: params['pool'] = self._multiget_pool return riak.client.multi.multiget(self, pairs, **params)
[ "def", "multiget", "(", "self", ",", "pairs", ",", "*", "*", "params", ")", ":", "if", "self", ".", "_multiget_pool", ":", "params", "[", "'pool'", "]", "=", "self", ".", "_multiget_pool", "return", "riak", ".", "client", ".", "multi", ".", "multiget", "(", "self", ",", "pairs", ",", "*", "*", "params", ")" ]
Fetches many keys in parallel via threads. :param pairs: list of bucket_type/bucket/key tuple triples :type pairs: list :param params: additional request flags, e.g. r, pr :type params: dict :rtype: list of :class:`RiakObjects <riak.riak_object.RiakObject>`, :class:`Datatypes <riak.datatypes.Datatype>`, or tuples of bucket_type, bucket, key, and the exception raised on fetch
[ "Fetches", "many", "keys", "in", "parallel", "via", "threads", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1003-L1016
train
basho/riak-python-client
riak/client/operations.py
RiakClientOperations.multiput
def multiput(self, objs, **params): """ Stores objects in parallel via threads. :param objs: the objects to store :type objs: list of `RiakObject <riak.riak_object.RiakObject>` :param params: additional request flags, e.g. w, dw, pw :type params: dict :rtype: list of boolean or :class:`RiakObjects <riak.riak_object.RiakObject>`, """ if self._multiput_pool: params['pool'] = self._multiput_pool return riak.client.multi.multiput(self, objs, **params)
python
def multiput(self, objs, **params): """ Stores objects in parallel via threads. :param objs: the objects to store :type objs: list of `RiakObject <riak.riak_object.RiakObject>` :param params: additional request flags, e.g. w, dw, pw :type params: dict :rtype: list of boolean or :class:`RiakObjects <riak.riak_object.RiakObject>`, """ if self._multiput_pool: params['pool'] = self._multiput_pool return riak.client.multi.multiput(self, objs, **params)
[ "def", "multiput", "(", "self", ",", "objs", ",", "*", "*", "params", ")", ":", "if", "self", ".", "_multiput_pool", ":", "params", "[", "'pool'", "]", "=", "self", ".", "_multiput_pool", "return", "riak", ".", "client", ".", "multi", ".", "multiput", "(", "self", ",", "objs", ",", "*", "*", "params", ")" ]
Stores objects in parallel via threads. :param objs: the objects to store :type objs: list of `RiakObject <riak.riak_object.RiakObject>` :param params: additional request flags, e.g. w, dw, pw :type params: dict :rtype: list of boolean or :class:`RiakObjects <riak.riak_object.RiakObject>`,
[ "Stores", "objects", "in", "parallel", "via", "threads", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1018-L1031
train
basho/riak-python-client
riak/client/operations.py
RiakClientOperations.fetch_datatype
def fetch_datatype(self, bucket, key, r=None, pr=None, basic_quorum=None, notfound_ok=None, timeout=None, include_context=None): """ Fetches the value of a Riak Datatype. .. note:: This request is automatically retried :attr:`retries` times if it fails due to network error. :param bucket: the bucket of the datatype, which must belong to a :class:`~riak.bucket.BucketType` :type bucket: :class:`~riak.bucket.RiakBucket` :param key: the key of the datatype :type key: string :param r: the read quorum :type r: integer, string, None :param pr: the primary read quorum :type pr: integer, string, None :param basic_quorum: whether to use the "basic quorum" policy for not-founds :type basic_quorum: bool, None :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool, None :param timeout: a timeout value in milliseconds :type timeout: int, None :param include_context: whether to return the opaque context as well as the value, which is useful for removal operations on sets and maps :type include_context: bool, None :rtype: :class:`~riak.datatypes.Datatype` """ dtype, value, context = self._fetch_datatype( bucket, key, r=r, pr=pr, basic_quorum=basic_quorum, notfound_ok=notfound_ok, timeout=timeout, include_context=include_context) return TYPES[dtype](bucket=bucket, key=key, value=value, context=context)
python
def fetch_datatype(self, bucket, key, r=None, pr=None, basic_quorum=None, notfound_ok=None, timeout=None, include_context=None): """ Fetches the value of a Riak Datatype. .. note:: This request is automatically retried :attr:`retries` times if it fails due to network error. :param bucket: the bucket of the datatype, which must belong to a :class:`~riak.bucket.BucketType` :type bucket: :class:`~riak.bucket.RiakBucket` :param key: the key of the datatype :type key: string :param r: the read quorum :type r: integer, string, None :param pr: the primary read quorum :type pr: integer, string, None :param basic_quorum: whether to use the "basic quorum" policy for not-founds :type basic_quorum: bool, None :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool, None :param timeout: a timeout value in milliseconds :type timeout: int, None :param include_context: whether to return the opaque context as well as the value, which is useful for removal operations on sets and maps :type include_context: bool, None :rtype: :class:`~riak.datatypes.Datatype` """ dtype, value, context = self._fetch_datatype( bucket, key, r=r, pr=pr, basic_quorum=basic_quorum, notfound_ok=notfound_ok, timeout=timeout, include_context=include_context) return TYPES[dtype](bucket=bucket, key=key, value=value, context=context)
[ "def", "fetch_datatype", "(", "self", ",", "bucket", ",", "key", ",", "r", "=", "None", ",", "pr", "=", "None", ",", "basic_quorum", "=", "None", ",", "notfound_ok", "=", "None", ",", "timeout", "=", "None", ",", "include_context", "=", "None", ")", ":", "dtype", ",", "value", ",", "context", "=", "self", ".", "_fetch_datatype", "(", "bucket", ",", "key", ",", "r", "=", "r", ",", "pr", "=", "pr", ",", "basic_quorum", "=", "basic_quorum", ",", "notfound_ok", "=", "notfound_ok", ",", "timeout", "=", "timeout", ",", "include_context", "=", "include_context", ")", "return", "TYPES", "[", "dtype", "]", "(", "bucket", "=", "bucket", ",", "key", "=", "key", ",", "value", "=", "value", ",", "context", "=", "context", ")" ]
Fetches the value of a Riak Datatype. .. note:: This request is automatically retried :attr:`retries` times if it fails due to network error. :param bucket: the bucket of the datatype, which must belong to a :class:`~riak.bucket.BucketType` :type bucket: :class:`~riak.bucket.RiakBucket` :param key: the key of the datatype :type key: string :param r: the read quorum :type r: integer, string, None :param pr: the primary read quorum :type pr: integer, string, None :param basic_quorum: whether to use the "basic quorum" policy for not-founds :type basic_quorum: bool, None :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool, None :param timeout: a timeout value in milliseconds :type timeout: int, None :param include_context: whether to return the opaque context as well as the value, which is useful for removal operations on sets and maps :type include_context: bool, None :rtype: :class:`~riak.datatypes.Datatype`
[ "Fetches", "the", "value", "of", "a", "Riak", "Datatype", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1106-L1143
train
basho/riak-python-client
riak/client/operations.py
RiakClientOperations.update_datatype
def update_datatype(self, datatype, w=None, dw=None, pw=None, return_body=None, timeout=None, include_context=None): """ Sends an update to a Riak Datatype to the server. This operation is not idempotent and so will not be retried automatically. :param datatype: the datatype with pending updates :type datatype: :class:`~riak.datatypes.Datatype` :param w: the write quorum :type w: integer, string, None :param dw: the durable write quorum :type dw: integer, string, None :param pw: the primary write quorum :type pw: integer, string, None :param timeout: a timeout value in milliseconds :type timeout: int :param include_context: whether to return the opaque context as well as the value, which is useful for removal operations on sets and maps :type include_context: bool :rtype: tuple of datatype, opaque value and opaque context """ _validate_timeout(timeout) with self._transport() as transport: return transport.update_datatype(datatype, w=w, dw=dw, pw=pw, return_body=return_body, timeout=timeout, include_context=include_context)
python
def update_datatype(self, datatype, w=None, dw=None, pw=None, return_body=None, timeout=None, include_context=None): """ Sends an update to a Riak Datatype to the server. This operation is not idempotent and so will not be retried automatically. :param datatype: the datatype with pending updates :type datatype: :class:`~riak.datatypes.Datatype` :param w: the write quorum :type w: integer, string, None :param dw: the durable write quorum :type dw: integer, string, None :param pw: the primary write quorum :type pw: integer, string, None :param timeout: a timeout value in milliseconds :type timeout: int :param include_context: whether to return the opaque context as well as the value, which is useful for removal operations on sets and maps :type include_context: bool :rtype: tuple of datatype, opaque value and opaque context """ _validate_timeout(timeout) with self._transport() as transport: return transport.update_datatype(datatype, w=w, dw=dw, pw=pw, return_body=return_body, timeout=timeout, include_context=include_context)
[ "def", "update_datatype", "(", "self", ",", "datatype", ",", "w", "=", "None", ",", "dw", "=", "None", ",", "pw", "=", "None", ",", "return_body", "=", "None", ",", "timeout", "=", "None", ",", "include_context", "=", "None", ")", ":", "_validate_timeout", "(", "timeout", ")", "with", "self", ".", "_transport", "(", ")", "as", "transport", ":", "return", "transport", ".", "update_datatype", "(", "datatype", ",", "w", "=", "w", ",", "dw", "=", "dw", ",", "pw", "=", "pw", ",", "return_body", "=", "return_body", ",", "timeout", "=", "timeout", ",", "include_context", "=", "include_context", ")" ]
Sends an update to a Riak Datatype to the server. This operation is not idempotent and so will not be retried automatically. :param datatype: the datatype with pending updates :type datatype: :class:`~riak.datatypes.Datatype` :param w: the write quorum :type w: integer, string, None :param dw: the durable write quorum :type dw: integer, string, None :param pw: the primary write quorum :type pw: integer, string, None :param timeout: a timeout value in milliseconds :type timeout: int :param include_context: whether to return the opaque context as well as the value, which is useful for removal operations on sets and maps :type include_context: bool :rtype: tuple of datatype, opaque value and opaque context
[ "Sends", "an", "update", "to", "a", "Riak", "Datatype", "to", "the", "server", ".", "This", "operation", "is", "not", "idempotent", "and", "so", "will", "not", "be", "retried", "automatically", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/operations.py#L1145-L1175
train
basho/riak-python-client
riak/transports/tcp/connection.py
TcpConnection._non_connect_send_recv
def _non_connect_send_recv(self, msg_code, data=None): """ Similar to self._send_recv, but doesn't try to initiate a connection, thus preventing an infinite loop. """ self._non_connect_send_msg(msg_code, data) return self._recv_msg()
python
def _non_connect_send_recv(self, msg_code, data=None): """ Similar to self._send_recv, but doesn't try to initiate a connection, thus preventing an infinite loop. """ self._non_connect_send_msg(msg_code, data) return self._recv_msg()
[ "def", "_non_connect_send_recv", "(", "self", ",", "msg_code", ",", "data", "=", "None", ")", ":", "self", ".", "_non_connect_send_msg", "(", "msg_code", ",", "data", ")", "return", "self", ".", "_recv_msg", "(", ")" ]
Similar to self._send_recv, but doesn't try to initiate a connection, thus preventing an infinite loop.
[ "Similar", "to", "self", ".", "_send_recv", "but", "doesn", "t", "try", "to", "initiate", "a", "connection", "thus", "preventing", "an", "infinite", "loop", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L53-L59
train
basho/riak-python-client
riak/transports/tcp/connection.py
TcpConnection._non_connect_send_msg
def _non_connect_send_msg(self, msg_code, data): """ Similar to self._send, but doesn't try to initiate a connection, thus preventing an infinite loop. """ try: self._socket.sendall(self._encode_msg(msg_code, data)) except (IOError, socket.error) as e: if e.errno == errno.EPIPE: raise ConnectionClosed(e) else: raise
python
def _non_connect_send_msg(self, msg_code, data): """ Similar to self._send, but doesn't try to initiate a connection, thus preventing an infinite loop. """ try: self._socket.sendall(self._encode_msg(msg_code, data)) except (IOError, socket.error) as e: if e.errno == errno.EPIPE: raise ConnectionClosed(e) else: raise
[ "def", "_non_connect_send_msg", "(", "self", ",", "msg_code", ",", "data", ")", ":", "try", ":", "self", ".", "_socket", ".", "sendall", "(", "self", ".", "_encode_msg", "(", "msg_code", ",", "data", ")", ")", "except", "(", "IOError", ",", "socket", ".", "error", ")", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "EPIPE", ":", "raise", "ConnectionClosed", "(", "e", ")", "else", ":", "raise" ]
Similar to self._send, but doesn't try to initiate a connection, thus preventing an infinite loop.
[ "Similar", "to", "self", ".", "_send", "but", "doesn", "t", "try", "to", "initiate", "a", "connection", "thus", "preventing", "an", "infinite", "loop", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L65-L76
train
basho/riak-python-client
riak/transports/tcp/connection.py
TcpConnection._init_security
def _init_security(self): """ Initialize a secure connection to the server. """ if not self._starttls(): raise SecurityError("Could not start TLS connection") # _ssh_handshake() will throw an exception upon failure self._ssl_handshake() if not self._auth(): raise SecurityError("Could not authorize connection")
python
def _init_security(self): """ Initialize a secure connection to the server. """ if not self._starttls(): raise SecurityError("Could not start TLS connection") # _ssh_handshake() will throw an exception upon failure self._ssl_handshake() if not self._auth(): raise SecurityError("Could not authorize connection")
[ "def", "_init_security", "(", "self", ")", ":", "if", "not", "self", ".", "_starttls", "(", ")", ":", "raise", "SecurityError", "(", "\"Could not start TLS connection\"", ")", "# _ssh_handshake() will throw an exception upon failure", "self", ".", "_ssl_handshake", "(", ")", "if", "not", "self", ".", "_auth", "(", ")", ":", "raise", "SecurityError", "(", "\"Could not authorize connection\"", ")" ]
Initialize a secure connection to the server.
[ "Initialize", "a", "secure", "connection", "to", "the", "server", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L82-L91
train
basho/riak-python-client
riak/transports/tcp/connection.py
TcpConnection._starttls
def _starttls(self): """ Exchange a STARTTLS message with Riak to initiate secure communications return True is Riak responds with a STARTTLS response, False otherwise """ resp_code, _ = self._non_connect_send_recv( riak.pb.messages.MSG_CODE_START_TLS) if resp_code == riak.pb.messages.MSG_CODE_START_TLS: return True else: return False
python
def _starttls(self): """ Exchange a STARTTLS message with Riak to initiate secure communications return True is Riak responds with a STARTTLS response, False otherwise """ resp_code, _ = self._non_connect_send_recv( riak.pb.messages.MSG_CODE_START_TLS) if resp_code == riak.pb.messages.MSG_CODE_START_TLS: return True else: return False
[ "def", "_starttls", "(", "self", ")", ":", "resp_code", ",", "_", "=", "self", ".", "_non_connect_send_recv", "(", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_START_TLS", ")", "if", "resp_code", "==", "riak", ".", "pb", ".", "messages", ".", "MSG_CODE_START_TLS", ":", "return", "True", "else", ":", "return", "False" ]
Exchange a STARTTLS message with Riak to initiate secure communications return True is Riak responds with a STARTTLS response, False otherwise
[ "Exchange", "a", "STARTTLS", "message", "with", "Riak", "to", "initiate", "secure", "communications", "return", "True", "is", "Riak", "responds", "with", "a", "STARTTLS", "response", "False", "otherwise" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L93-L103
train
basho/riak-python-client
riak/transports/tcp/connection.py
TcpConnection.close
def close(self): """ Closes the underlying socket of the PB connection. """ if self._socket: if USE_STDLIB_SSL: # NB: Python 2.7.8 and earlier does not have a compatible # shutdown() method due to the SSL lib try: self._socket.shutdown(socket.SHUT_RDWR) except EnvironmentError: # NB: sometimes these exceptions are raised if the initial # connection didn't succeed correctly, or if shutdown() is # called after the connection dies logging.debug('Exception occurred while shutting ' 'down socket.', exc_info=True) self._socket.close() del self._socket
python
def close(self): """ Closes the underlying socket of the PB connection. """ if self._socket: if USE_STDLIB_SSL: # NB: Python 2.7.8 and earlier does not have a compatible # shutdown() method due to the SSL lib try: self._socket.shutdown(socket.SHUT_RDWR) except EnvironmentError: # NB: sometimes these exceptions are raised if the initial # connection didn't succeed correctly, or if shutdown() is # called after the connection dies logging.debug('Exception occurred while shutting ' 'down socket.', exc_info=True) self._socket.close() del self._socket
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_socket", ":", "if", "USE_STDLIB_SSL", ":", "# NB: Python 2.7.8 and earlier does not have a compatible", "# shutdown() method due to the SSL lib", "try", ":", "self", ".", "_socket", ".", "shutdown", "(", "socket", ".", "SHUT_RDWR", ")", "except", "EnvironmentError", ":", "# NB: sometimes these exceptions are raised if the initial", "# connection didn't succeed correctly, or if shutdown() is", "# called after the connection dies", "logging", ".", "debug", "(", "'Exception occurred while shutting '", "'down socket.'", ",", "exc_info", "=", "True", ")", "self", ".", "_socket", ".", "close", "(", ")", "del", "self", ".", "_socket" ]
Closes the underlying socket of the PB connection.
[ "Closes", "the", "underlying", "socket", "of", "the", "PB", "connection", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/connection.py#L266-L283
train
basho/riak-python-client
riak/riak_object.py
content_property
def content_property(name, doc=None): """ Delegates a property to the first sibling in a RiakObject, raising an error when the object is in conflict. """ def _setter(self, value): if len(self.siblings) == 0: # In this case, assume that what the user wants is to # create a new sibling inside an empty object. self.siblings = [RiakContent(self)] if len(self.siblings) != 1: raise ConflictError() setattr(self.siblings[0], name, value) def _getter(self): if len(self.siblings) == 0: return if len(self.siblings) != 1: raise ConflictError() return getattr(self.siblings[0], name) return property(_getter, _setter, doc=doc)
python
def content_property(name, doc=None): """ Delegates a property to the first sibling in a RiakObject, raising an error when the object is in conflict. """ def _setter(self, value): if len(self.siblings) == 0: # In this case, assume that what the user wants is to # create a new sibling inside an empty object. self.siblings = [RiakContent(self)] if len(self.siblings) != 1: raise ConflictError() setattr(self.siblings[0], name, value) def _getter(self): if len(self.siblings) == 0: return if len(self.siblings) != 1: raise ConflictError() return getattr(self.siblings[0], name) return property(_getter, _setter, doc=doc)
[ "def", "content_property", "(", "name", ",", "doc", "=", "None", ")", ":", "def", "_setter", "(", "self", ",", "value", ")", ":", "if", "len", "(", "self", ".", "siblings", ")", "==", "0", ":", "# In this case, assume that what the user wants is to", "# create a new sibling inside an empty object.", "self", ".", "siblings", "=", "[", "RiakContent", "(", "self", ")", "]", "if", "len", "(", "self", ".", "siblings", ")", "!=", "1", ":", "raise", "ConflictError", "(", ")", "setattr", "(", "self", ".", "siblings", "[", "0", "]", ",", "name", ",", "value", ")", "def", "_getter", "(", "self", ")", ":", "if", "len", "(", "self", ".", "siblings", ")", "==", "0", ":", "return", "if", "len", "(", "self", ".", "siblings", ")", "!=", "1", ":", "raise", "ConflictError", "(", ")", "return", "getattr", "(", "self", ".", "siblings", "[", "0", "]", ",", "name", ")", "return", "property", "(", "_getter", ",", "_setter", ",", "doc", "=", "doc", ")" ]
Delegates a property to the first sibling in a RiakObject, raising an error when the object is in conflict.
[ "Delegates", "a", "property", "to", "the", "first", "sibling", "in", "a", "RiakObject", "raising", "an", "error", "when", "the", "object", "is", "in", "conflict", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L22-L43
train
basho/riak-python-client
riak/riak_object.py
content_method
def content_method(name): """ Delegates a method to the first sibling in a RiakObject, raising an error when the object is in conflict. """ def _delegate(self, *args, **kwargs): if len(self.siblings) != 1: raise ConflictError() return getattr(self.siblings[0], name).__call__(*args, **kwargs) _delegate.__doc__ = getattr(RiakContent, name).__doc__ return _delegate
python
def content_method(name): """ Delegates a method to the first sibling in a RiakObject, raising an error when the object is in conflict. """ def _delegate(self, *args, **kwargs): if len(self.siblings) != 1: raise ConflictError() return getattr(self.siblings[0], name).__call__(*args, **kwargs) _delegate.__doc__ = getattr(RiakContent, name).__doc__ return _delegate
[ "def", "content_method", "(", "name", ")", ":", "def", "_delegate", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "len", "(", "self", ".", "siblings", ")", "!=", "1", ":", "raise", "ConflictError", "(", ")", "return", "getattr", "(", "self", ".", "siblings", "[", "0", "]", ",", "name", ")", ".", "__call__", "(", "*", "args", ",", "*", "*", "kwargs", ")", "_delegate", ".", "__doc__", "=", "getattr", "(", "RiakContent", ",", "name", ")", ".", "__doc__", "return", "_delegate" ]
Delegates a method to the first sibling in a RiakObject, raising an error when the object is in conflict.
[ "Delegates", "a", "method", "to", "the", "first", "sibling", "in", "a", "RiakObject", "raising", "an", "error", "when", "the", "object", "is", "in", "conflict", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L46-L58
train
basho/riak-python-client
riak/riak_object.py
RiakObject.store
def store(self, w=None, dw=None, pw=None, return_body=True, if_none_match=False, timeout=None): """ Store the object in Riak. When this operation completes, the object could contain new metadata and possibly new data if Riak contains a newer version of the object according to the object's vector clock. :param w: W-value, wait for this many partitions to respond before returning to client. :type w: integer :param dw: DW-value, wait for this many partitions to confirm the write before returning to client. :type dw: integer :param pw: PW-value, require this many primary partitions to be available before performing the put :type pw: integer :param return_body: if the newly stored object should be retrieved :type return_body: bool :param if_none_match: Should the object be stored only if there is no key previously defined :type if_none_match: bool :param timeout: a timeout value in milliseconds :type timeout: int :rtype: :class:`RiakObject` """ if len(self.siblings) != 1: raise ConflictError("Attempting to store an invalid object, " "resolve the siblings first") self.client.put(self, w=w, dw=dw, pw=pw, return_body=return_body, if_none_match=if_none_match, timeout=timeout) return self
python
def store(self, w=None, dw=None, pw=None, return_body=True, if_none_match=False, timeout=None): """ Store the object in Riak. When this operation completes, the object could contain new metadata and possibly new data if Riak contains a newer version of the object according to the object's vector clock. :param w: W-value, wait for this many partitions to respond before returning to client. :type w: integer :param dw: DW-value, wait for this many partitions to confirm the write before returning to client. :type dw: integer :param pw: PW-value, require this many primary partitions to be available before performing the put :type pw: integer :param return_body: if the newly stored object should be retrieved :type return_body: bool :param if_none_match: Should the object be stored only if there is no key previously defined :type if_none_match: bool :param timeout: a timeout value in milliseconds :type timeout: int :rtype: :class:`RiakObject` """ if len(self.siblings) != 1: raise ConflictError("Attempting to store an invalid object, " "resolve the siblings first") self.client.put(self, w=w, dw=dw, pw=pw, return_body=return_body, if_none_match=if_none_match, timeout=timeout) return self
[ "def", "store", "(", "self", ",", "w", "=", "None", ",", "dw", "=", "None", ",", "pw", "=", "None", ",", "return_body", "=", "True", ",", "if_none_match", "=", "False", ",", "timeout", "=", "None", ")", ":", "if", "len", "(", "self", ".", "siblings", ")", "!=", "1", ":", "raise", "ConflictError", "(", "\"Attempting to store an invalid object, \"", "\"resolve the siblings first\"", ")", "self", ".", "client", ".", "put", "(", "self", ",", "w", "=", "w", ",", "dw", "=", "dw", ",", "pw", "=", "pw", ",", "return_body", "=", "return_body", ",", "if_none_match", "=", "if_none_match", ",", "timeout", "=", "timeout", ")", "return", "self" ]
Store the object in Riak. When this operation completes, the object could contain new metadata and possibly new data if Riak contains a newer version of the object according to the object's vector clock. :param w: W-value, wait for this many partitions to respond before returning to client. :type w: integer :param dw: DW-value, wait for this many partitions to confirm the write before returning to client. :type dw: integer :param pw: PW-value, require this many primary partitions to be available before performing the put :type pw: integer :param return_body: if the newly stored object should be retrieved :type return_body: bool :param if_none_match: Should the object be stored only if there is no key previously defined :type if_none_match: bool :param timeout: a timeout value in milliseconds :type timeout: int :rtype: :class:`RiakObject`
[ "Store", "the", "object", "in", "Riak", ".", "When", "this", "operation", "completes", "the", "object", "could", "contain", "new", "metadata", "and", "possibly", "new", "data", "if", "Riak", "contains", "a", "newer", "version", "of", "the", "object", "according", "to", "the", "object", "s", "vector", "clock", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L247-L283
train
basho/riak-python-client
riak/riak_object.py
RiakObject.reload
def reload(self, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None, head_only=False): """ Reload the object from Riak. When this operation completes, the object could contain new metadata and a new value, if the object was updated in Riak since it was last retrieved. .. note:: Even if the key is not found in Riak, this will return a :class:`RiakObject`. Check the :attr:`exists` property to see if the key was found. :param r: R-Value, wait for this many partitions to respond before returning to client. :type r: integer :param pr: PR-value, require this many primary partitions to be available before performing the read that precedes the put :type pr: integer :param timeout: a timeout value in milliseconds :type timeout: int :param basic_quorum: whether to use the "basic quorum" policy for not-founds :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool :param head_only: whether to fetch without value, so only metadata (only available on PB transport) :type head_only: bool :rtype: :class:`RiakObject` """ self.client.get(self, r=r, pr=pr, timeout=timeout, head_only=head_only) return self
python
def reload(self, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None, head_only=False): """ Reload the object from Riak. When this operation completes, the object could contain new metadata and a new value, if the object was updated in Riak since it was last retrieved. .. note:: Even if the key is not found in Riak, this will return a :class:`RiakObject`. Check the :attr:`exists` property to see if the key was found. :param r: R-Value, wait for this many partitions to respond before returning to client. :type r: integer :param pr: PR-value, require this many primary partitions to be available before performing the read that precedes the put :type pr: integer :param timeout: a timeout value in milliseconds :type timeout: int :param basic_quorum: whether to use the "basic quorum" policy for not-founds :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool :param head_only: whether to fetch without value, so only metadata (only available on PB transport) :type head_only: bool :rtype: :class:`RiakObject` """ self.client.get(self, r=r, pr=pr, timeout=timeout, head_only=head_only) return self
[ "def", "reload", "(", "self", ",", "r", "=", "None", ",", "pr", "=", "None", ",", "timeout", "=", "None", ",", "basic_quorum", "=", "None", ",", "notfound_ok", "=", "None", ",", "head_only", "=", "False", ")", ":", "self", ".", "client", ".", "get", "(", "self", ",", "r", "=", "r", ",", "pr", "=", "pr", ",", "timeout", "=", "timeout", ",", "head_only", "=", "head_only", ")", "return", "self" ]
Reload the object from Riak. When this operation completes, the object could contain new metadata and a new value, if the object was updated in Riak since it was last retrieved. .. note:: Even if the key is not found in Riak, this will return a :class:`RiakObject`. Check the :attr:`exists` property to see if the key was found. :param r: R-Value, wait for this many partitions to respond before returning to client. :type r: integer :param pr: PR-value, require this many primary partitions to be available before performing the read that precedes the put :type pr: integer :param timeout: a timeout value in milliseconds :type timeout: int :param basic_quorum: whether to use the "basic quorum" policy for not-founds :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool :param head_only: whether to fetch without value, so only metadata (only available on PB transport) :type head_only: bool :rtype: :class:`RiakObject`
[ "Reload", "the", "object", "from", "Riak", ".", "When", "this", "operation", "completes", "the", "object", "could", "contain", "new", "metadata", "and", "a", "new", "value", "if", "the", "object", "was", "updated", "in", "Riak", "since", "it", "was", "last", "retrieved", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L285-L317
train
basho/riak-python-client
riak/riak_object.py
RiakObject.delete
def delete(self, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): """ Delete this object from Riak. :param r: R-value, wait for this many partitions to read object before performing the put :type r: integer :param w: W-value, wait for this many partitions to respond before returning to client. :type w: integer :param dw: DW-value, wait for this many partitions to confirm the write before returning to client. :type dw: integer :param pr: PR-value, require this many primary partitions to be available before performing the read that precedes the put :type pr: integer :param pw: PW-value, require this many primary partitions to be available before performing the put :type pw: integer :param timeout: a timeout value in milliseconds :type timeout: int :rtype: :class:`RiakObject` """ self.client.delete(self, r=r, w=w, dw=dw, pr=pr, pw=pw, timeout=timeout) self.clear() return self
python
def delete(self, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): """ Delete this object from Riak. :param r: R-value, wait for this many partitions to read object before performing the put :type r: integer :param w: W-value, wait for this many partitions to respond before returning to client. :type w: integer :param dw: DW-value, wait for this many partitions to confirm the write before returning to client. :type dw: integer :param pr: PR-value, require this many primary partitions to be available before performing the read that precedes the put :type pr: integer :param pw: PW-value, require this many primary partitions to be available before performing the put :type pw: integer :param timeout: a timeout value in milliseconds :type timeout: int :rtype: :class:`RiakObject` """ self.client.delete(self, r=r, w=w, dw=dw, pr=pr, pw=pw, timeout=timeout) self.clear() return self
[ "def", "delete", "(", "self", ",", "r", "=", "None", ",", "w", "=", "None", ",", "dw", "=", "None", ",", "pr", "=", "None", ",", "pw", "=", "None", ",", "timeout", "=", "None", ")", ":", "self", ".", "client", ".", "delete", "(", "self", ",", "r", "=", "r", ",", "w", "=", "w", ",", "dw", "=", "dw", ",", "pr", "=", "pr", ",", "pw", "=", "pw", ",", "timeout", "=", "timeout", ")", "self", ".", "clear", "(", ")", "return", "self" ]
Delete this object from Riak. :param r: R-value, wait for this many partitions to read object before performing the put :type r: integer :param w: W-value, wait for this many partitions to respond before returning to client. :type w: integer :param dw: DW-value, wait for this many partitions to confirm the write before returning to client. :type dw: integer :param pr: PR-value, require this many primary partitions to be available before performing the read that precedes the put :type pr: integer :param pw: PW-value, require this many primary partitions to be available before performing the put :type pw: integer :param timeout: a timeout value in milliseconds :type timeout: int :rtype: :class:`RiakObject`
[ "Delete", "this", "object", "from", "Riak", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/riak_object.py#L319-L348
train
basho/riak-python-client
riak/bucket.py
RiakBucket.get_encoder
def get_encoder(self, content_type): """ Get the encoding function for the provided content type for this bucket. :param content_type: the requested media type :type content_type: str :param content_type: Content type requested """ if content_type in self._encoders: return self._encoders[content_type] else: return self._client.get_encoder(content_type)
python
def get_encoder(self, content_type): """ Get the encoding function for the provided content type for this bucket. :param content_type: the requested media type :type content_type: str :param content_type: Content type requested """ if content_type in self._encoders: return self._encoders[content_type] else: return self._client.get_encoder(content_type)
[ "def", "get_encoder", "(", "self", ",", "content_type", ")", ":", "if", "content_type", "in", "self", ".", "_encoders", ":", "return", "self", ".", "_encoders", "[", "content_type", "]", "else", ":", "return", "self", ".", "_client", ".", "get_encoder", "(", "content_type", ")" ]
Get the encoding function for the provided content type for this bucket. :param content_type: the requested media type :type content_type: str :param content_type: Content type requested
[ "Get", "the", "encoding", "function", "for", "the", "provided", "content", "type", "for", "this", "bucket", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/bucket.py#L88-L100
train
basho/riak-python-client
riak/bucket.py
RiakBucket.get_decoder
def get_decoder(self, content_type): """ Get the decoding function for the provided content type for this bucket. :param content_type: the requested media type :type content_type: str :rtype: function """ if content_type in self._decoders: return self._decoders[content_type] else: return self._client.get_decoder(content_type)
python
def get_decoder(self, content_type): """ Get the decoding function for the provided content type for this bucket. :param content_type: the requested media type :type content_type: str :rtype: function """ if content_type in self._decoders: return self._decoders[content_type] else: return self._client.get_decoder(content_type)
[ "def", "get_decoder", "(", "self", ",", "content_type", ")", ":", "if", "content_type", "in", "self", ".", "_decoders", ":", "return", "self", ".", "_decoders", "[", "content_type", "]", "else", ":", "return", "self", ".", "_client", ".", "get_decoder", "(", "content_type", ")" ]
Get the decoding function for the provided content type for this bucket. :param content_type: the requested media type :type content_type: str :rtype: function
[ "Get", "the", "decoding", "function", "for", "the", "provided", "content", "type", "for", "this", "bucket", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/bucket.py#L116-L128
train
basho/riak-python-client
riak/bucket.py
RiakBucket.multiget
def multiget(self, keys, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None, head_only=False): """ Retrieves a list of keys belonging to this bucket in parallel. :param keys: the keys to fetch :type keys: list :param r: R-Value for the requests (defaults to bucket's R) :type r: integer :param pr: PR-Value for the requests (defaults to bucket's PR) :type pr: integer :param timeout: a timeout value in milliseconds :type timeout: int :param basic_quorum: whether to use the "basic quorum" policy for not-founds :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool :param head_only: whether to fetch without value, so only metadata (only available on PB transport) :type head_only: bool :rtype: list of :class:`RiakObjects <riak.riak_object.RiakObject>`, :class:`Datatypes <riak.datatypes.Datatype>`, or tuples of bucket_type, bucket, key, and the exception raised on fetch """ bkeys = [(self.bucket_type.name, self.name, key) for key in keys] return self._client.multiget(bkeys, r=r, pr=pr, timeout=timeout, basic_quorum=basic_quorum, notfound_ok=notfound_ok, head_only=head_only)
python
def multiget(self, keys, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None, head_only=False): """ Retrieves a list of keys belonging to this bucket in parallel. :param keys: the keys to fetch :type keys: list :param r: R-Value for the requests (defaults to bucket's R) :type r: integer :param pr: PR-Value for the requests (defaults to bucket's PR) :type pr: integer :param timeout: a timeout value in milliseconds :type timeout: int :param basic_quorum: whether to use the "basic quorum" policy for not-founds :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool :param head_only: whether to fetch without value, so only metadata (only available on PB transport) :type head_only: bool :rtype: list of :class:`RiakObjects <riak.riak_object.RiakObject>`, :class:`Datatypes <riak.datatypes.Datatype>`, or tuples of bucket_type, bucket, key, and the exception raised on fetch """ bkeys = [(self.bucket_type.name, self.name, key) for key in keys] return self._client.multiget(bkeys, r=r, pr=pr, timeout=timeout, basic_quorum=basic_quorum, notfound_ok=notfound_ok, head_only=head_only)
[ "def", "multiget", "(", "self", ",", "keys", ",", "r", "=", "None", ",", "pr", "=", "None", ",", "timeout", "=", "None", ",", "basic_quorum", "=", "None", ",", "notfound_ok", "=", "None", ",", "head_only", "=", "False", ")", ":", "bkeys", "=", "[", "(", "self", ".", "bucket_type", ".", "name", ",", "self", ".", "name", ",", "key", ")", "for", "key", "in", "keys", "]", "return", "self", ".", "_client", ".", "multiget", "(", "bkeys", ",", "r", "=", "r", ",", "pr", "=", "pr", ",", "timeout", "=", "timeout", ",", "basic_quorum", "=", "basic_quorum", ",", "notfound_ok", "=", "notfound_ok", ",", "head_only", "=", "head_only", ")" ]
Retrieves a list of keys belonging to this bucket in parallel. :param keys: the keys to fetch :type keys: list :param r: R-Value for the requests (defaults to bucket's R) :type r: integer :param pr: PR-Value for the requests (defaults to bucket's PR) :type pr: integer :param timeout: a timeout value in milliseconds :type timeout: int :param basic_quorum: whether to use the "basic quorum" policy for not-founds :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool :param head_only: whether to fetch without value, so only metadata (only available on PB transport) :type head_only: bool :rtype: list of :class:`RiakObjects <riak.riak_object.RiakObject>`, :class:`Datatypes <riak.datatypes.Datatype>`, or tuples of bucket_type, bucket, key, and the exception raised on fetch
[ "Retrieves", "a", "list", "of", "keys", "belonging", "to", "this", "bucket", "in", "parallel", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/bucket.py#L238-L268
train
basho/riak-python-client
riak/bucket.py
BucketType.stream_buckets
def stream_buckets(self, timeout=None): """ Streams the list of buckets under this bucket-type. This is a generator method that should be iterated over. The caller must close the stream when finished. See :meth:`RiakClient.stream_buckets() <riak.client.RiakClient.stream_buckets>` for more details. .. warning:: Do not use this in production, as it requires traversing through all keys stored in a cluster. :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator that yields lists of :class:`RiakBucket <riak.bucket.RiakBucket>` instances """ return self._client.stream_buckets(bucket_type=self, timeout=timeout)
python
def stream_buckets(self, timeout=None): """ Streams the list of buckets under this bucket-type. This is a generator method that should be iterated over. The caller must close the stream when finished. See :meth:`RiakClient.stream_buckets() <riak.client.RiakClient.stream_buckets>` for more details. .. warning:: Do not use this in production, as it requires traversing through all keys stored in a cluster. :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator that yields lists of :class:`RiakBucket <riak.bucket.RiakBucket>` instances """ return self._client.stream_buckets(bucket_type=self, timeout=timeout)
[ "def", "stream_buckets", "(", "self", ",", "timeout", "=", "None", ")", ":", "return", "self", ".", "_client", ".", "stream_buckets", "(", "bucket_type", "=", "self", ",", "timeout", "=", "timeout", ")" ]
Streams the list of buckets under this bucket-type. This is a generator method that should be iterated over. The caller must close the stream when finished. See :meth:`RiakClient.stream_buckets() <riak.client.RiakClient.stream_buckets>` for more details. .. warning:: Do not use this in production, as it requires traversing through all keys stored in a cluster. :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator that yields lists of :class:`RiakBucket <riak.bucket.RiakBucket>` instances
[ "Streams", "the", "list", "of", "buckets", "under", "this", "bucket", "-", "type", ".", "This", "is", "a", "generator", "method", "that", "should", "be", "iterated", "over", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/bucket.py#L712-L729
train
basho/riak-python-client
riak/node.py
Decaying.incr
def incr(self, d): """ Increases the value by the argument. :param d: the value to increase by :type d: float """ with self.lock: self.p = self.value() + d
python
def incr(self, d): """ Increases the value by the argument. :param d: the value to increase by :type d: float """ with self.lock: self.p = self.value() + d
[ "def", "incr", "(", "self", ",", "d", ")", ":", "with", "self", ".", "lock", ":", "self", ".", "p", "=", "self", ".", "value", "(", ")", "+", "d" ]
Increases the value by the argument. :param d: the value to increase by :type d: float
[ "Increases", "the", "value", "by", "the", "argument", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/node.py#L46-L54
train
basho/riak-python-client
riak/transports/transport.py
Transport.make_random_client_id
def make_random_client_id(self): """ Returns a random client identifier """ if PY2: return ('py_%s' % base64.b64encode(str(random.randint(1, 0x40000000)))) else: return ('py_%s' % base64.b64encode(bytes(str(random.randint(1, 0x40000000)), 'ascii')))
python
def make_random_client_id(self): """ Returns a random client identifier """ if PY2: return ('py_%s' % base64.b64encode(str(random.randint(1, 0x40000000)))) else: return ('py_%s' % base64.b64encode(bytes(str(random.randint(1, 0x40000000)), 'ascii')))
[ "def", "make_random_client_id", "(", "self", ")", ":", "if", "PY2", ":", "return", "(", "'py_%s'", "%", "base64", ".", "b64encode", "(", "str", "(", "random", ".", "randint", "(", "1", ",", "0x40000000", ")", ")", ")", ")", "else", ":", "return", "(", "'py_%s'", "%", "base64", ".", "b64encode", "(", "bytes", "(", "str", "(", "random", ".", "randint", "(", "1", ",", "0x40000000", ")", ")", ",", "'ascii'", ")", ")", ")" ]
Returns a random client identifier
[ "Returns", "a", "random", "client", "identifier" ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L42-L52
train
basho/riak-python-client
riak/transports/transport.py
Transport.get
def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None, head_only=False): """ Fetches an object. """ raise NotImplementedError
python
def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None, head_only=False): """ Fetches an object. """ raise NotImplementedError
[ "def", "get", "(", "self", ",", "robj", ",", "r", "=", "None", ",", "pr", "=", "None", ",", "timeout", "=", "None", ",", "basic_quorum", "=", "None", ",", "notfound_ok", "=", "None", ",", "head_only", "=", "False", ")", ":", "raise", "NotImplementedError" ]
Fetches an object.
[ "Fetches", "an", "object", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L70-L75
train
basho/riak-python-client
riak/transports/transport.py
Transport.put
def put(self, robj, w=None, dw=None, pw=None, return_body=None, if_none_match=None, timeout=None): """ Stores an object. """ raise NotImplementedError
python
def put(self, robj, w=None, dw=None, pw=None, return_body=None, if_none_match=None, timeout=None): """ Stores an object. """ raise NotImplementedError
[ "def", "put", "(", "self", ",", "robj", ",", "w", "=", "None", ",", "dw", "=", "None", ",", "pw", "=", "None", ",", "return_body", "=", "None", ",", "if_none_match", "=", "None", ",", "timeout", "=", "None", ")", ":", "raise", "NotImplementedError" ]
Stores an object.
[ "Stores", "an", "object", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L77-L82
train
basho/riak-python-client
riak/transports/transport.py
Transport.delete
def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): """ Deletes an object. """ raise NotImplementedError
python
def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): """ Deletes an object. """ raise NotImplementedError
[ "def", "delete", "(", "self", ",", "robj", ",", "rw", "=", "None", ",", "r", "=", "None", ",", "w", "=", "None", ",", "dw", "=", "None", ",", "pr", "=", "None", ",", "pw", "=", "None", ",", "timeout", "=", "None", ")", ":", "raise", "NotImplementedError" ]
Deletes an object.
[ "Deletes", "an", "object", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L84-L89
train
basho/riak-python-client
riak/transports/transport.py
Transport.update_counter
def update_counter(self, bucket, key, value, w=None, dw=None, pw=None, returnvalue=False): """ Updates a counter by the given value. """ raise NotImplementedError
python
def update_counter(self, bucket, key, value, w=None, dw=None, pw=None, returnvalue=False): """ Updates a counter by the given value. """ raise NotImplementedError
[ "def", "update_counter", "(", "self", ",", "bucket", ",", "key", ",", "value", ",", "w", "=", "None", ",", "dw", "=", "None", ",", "pw", "=", "None", ",", "returnvalue", "=", "False", ")", ":", "raise", "NotImplementedError" ]
Updates a counter by the given value.
[ "Updates", "a", "counter", "by", "the", "given", "value", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L285-L290
train
basho/riak-python-client
riak/transports/transport.py
Transport.fetch_datatype
def fetch_datatype(self, bucket, key, r=None, pr=None, basic_quorum=None, notfound_ok=None, timeout=None, include_context=None): """ Fetches a Riak Datatype. """ raise NotImplementedError
python
def fetch_datatype(self, bucket, key, r=None, pr=None, basic_quorum=None, notfound_ok=None, timeout=None, include_context=None): """ Fetches a Riak Datatype. """ raise NotImplementedError
[ "def", "fetch_datatype", "(", "self", ",", "bucket", ",", "key", ",", "r", "=", "None", ",", "pr", "=", "None", ",", "basic_quorum", "=", "None", ",", "notfound_ok", "=", "None", ",", "timeout", "=", "None", ",", "include_context", "=", "None", ")", ":", "raise", "NotImplementedError" ]
Fetches a Riak Datatype.
[ "Fetches", "a", "Riak", "Datatype", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L292-L297
train
basho/riak-python-client
riak/transports/transport.py
Transport.update_datatype
def update_datatype(self, datatype, w=None, dw=None, pw=None, return_body=None, timeout=None, include_context=None): """ Updates a Riak Datatype by sending local operations to the server. """ raise NotImplementedError
python
def update_datatype(self, datatype, w=None, dw=None, pw=None, return_body=None, timeout=None, include_context=None): """ Updates a Riak Datatype by sending local operations to the server. """ raise NotImplementedError
[ "def", "update_datatype", "(", "self", ",", "datatype", ",", "w", "=", "None", ",", "dw", "=", "None", ",", "pw", "=", "None", ",", "return_body", "=", "None", ",", "timeout", "=", "None", ",", "include_context", "=", "None", ")", ":", "raise", "NotImplementedError" ]
Updates a Riak Datatype by sending local operations to the server.
[ "Updates", "a", "Riak", "Datatype", "by", "sending", "local", "operations", "to", "the", "server", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L299-L304
train
basho/riak-python-client
riak/transports/transport.py
Transport._search_mapred_emu
def _search_mapred_emu(self, index, query): """ Emulates a search request via MapReduce. Used in the case where the transport supports MapReduce but has no native search capability. """ phases = [] if not self.phaseless_mapred(): phases.append({'language': 'erlang', 'module': 'riak_kv_mapreduce', 'function': 'reduce_identity', 'keep': True}) mr_result = self.mapred({'module': 'riak_search', 'function': 'mapred_search', 'arg': [index, query]}, phases) result = {'num_found': len(mr_result), 'max_score': 0.0, 'docs': []} for bucket, key, data in mr_result: if u'score' in data and data[u'score'][0] > result['max_score']: result['max_score'] = data[u'score'][0] result['docs'].append({u'id': key}) return result
python
def _search_mapred_emu(self, index, query): """ Emulates a search request via MapReduce. Used in the case where the transport supports MapReduce but has no native search capability. """ phases = [] if not self.phaseless_mapred(): phases.append({'language': 'erlang', 'module': 'riak_kv_mapreduce', 'function': 'reduce_identity', 'keep': True}) mr_result = self.mapred({'module': 'riak_search', 'function': 'mapred_search', 'arg': [index, query]}, phases) result = {'num_found': len(mr_result), 'max_score': 0.0, 'docs': []} for bucket, key, data in mr_result: if u'score' in data and data[u'score'][0] > result['max_score']: result['max_score'] = data[u'score'][0] result['docs'].append({u'id': key}) return result
[ "def", "_search_mapred_emu", "(", "self", ",", "index", ",", "query", ")", ":", "phases", "=", "[", "]", "if", "not", "self", ".", "phaseless_mapred", "(", ")", ":", "phases", ".", "append", "(", "{", "'language'", ":", "'erlang'", ",", "'module'", ":", "'riak_kv_mapreduce'", ",", "'function'", ":", "'reduce_identity'", ",", "'keep'", ":", "True", "}", ")", "mr_result", "=", "self", ".", "mapred", "(", "{", "'module'", ":", "'riak_search'", ",", "'function'", ":", "'mapred_search'", ",", "'arg'", ":", "[", "index", ",", "query", "]", "}", ",", "phases", ")", "result", "=", "{", "'num_found'", ":", "len", "(", "mr_result", ")", ",", "'max_score'", ":", "0.0", ",", "'docs'", ":", "[", "]", "}", "for", "bucket", ",", "key", ",", "data", "in", "mr_result", ":", "if", "u'score'", "in", "data", "and", "data", "[", "u'score'", "]", "[", "0", "]", ">", "result", "[", "'max_score'", "]", ":", "result", "[", "'max_score'", "]", "=", "data", "[", "u'score'", "]", "[", "0", "]", "result", "[", "'docs'", "]", ".", "append", "(", "{", "u'id'", ":", "key", "}", ")", "return", "result" ]
Emulates a search request via MapReduce. Used in the case where the transport supports MapReduce but has no native search capability.
[ "Emulates", "a", "search", "request", "via", "MapReduce", ".", "Used", "in", "the", "case", "where", "the", "transport", "supports", "MapReduce", "but", "has", "no", "native", "search", "capability", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L313-L336
train
basho/riak-python-client
riak/transports/transport.py
Transport._get_index_mapred_emu
def _get_index_mapred_emu(self, bucket, index, startkey, endkey=None): """ Emulates a secondary index request via MapReduce. Used in the case where the transport supports MapReduce but has no native secondary index query capability. """ phases = [] if not self.phaseless_mapred(): phases.append({'language': 'erlang', 'module': 'riak_kv_mapreduce', 'function': 'reduce_identity', 'keep': True}) if endkey: result = self.mapred({'bucket': bucket, 'index': index, 'start': startkey, 'end': endkey}, phases) else: result = self.mapred({'bucket': bucket, 'index': index, 'key': startkey}, phases) return [key for resultbucket, key in result]
python
def _get_index_mapred_emu(self, bucket, index, startkey, endkey=None): """ Emulates a secondary index request via MapReduce. Used in the case where the transport supports MapReduce but has no native secondary index query capability. """ phases = [] if not self.phaseless_mapred(): phases.append({'language': 'erlang', 'module': 'riak_kv_mapreduce', 'function': 'reduce_identity', 'keep': True}) if endkey: result = self.mapred({'bucket': bucket, 'index': index, 'start': startkey, 'end': endkey}, phases) else: result = self.mapred({'bucket': bucket, 'index': index, 'key': startkey}, phases) return [key for resultbucket, key in result]
[ "def", "_get_index_mapred_emu", "(", "self", ",", "bucket", ",", "index", ",", "startkey", ",", "endkey", "=", "None", ")", ":", "phases", "=", "[", "]", "if", "not", "self", ".", "phaseless_mapred", "(", ")", ":", "phases", ".", "append", "(", "{", "'language'", ":", "'erlang'", ",", "'module'", ":", "'riak_kv_mapreduce'", ",", "'function'", ":", "'reduce_identity'", ",", "'keep'", ":", "True", "}", ")", "if", "endkey", ":", "result", "=", "self", ".", "mapred", "(", "{", "'bucket'", ":", "bucket", ",", "'index'", ":", "index", ",", "'start'", ":", "startkey", ",", "'end'", ":", "endkey", "}", ",", "phases", ")", "else", ":", "result", "=", "self", ".", "mapred", "(", "{", "'bucket'", ":", "bucket", ",", "'index'", ":", "index", ",", "'key'", ":", "startkey", "}", ",", "phases", ")", "return", "[", "key", "for", "resultbucket", ",", "key", "in", "result", "]" ]
Emulates a secondary index request via MapReduce. Used in the case where the transport supports MapReduce but has no native secondary index query capability.
[ "Emulates", "a", "secondary", "index", "request", "via", "MapReduce", ".", "Used", "in", "the", "case", "where", "the", "transport", "supports", "MapReduce", "but", "has", "no", "native", "secondary", "index", "query", "capability", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/transport.py#L339-L362
train
basho/riak-python-client
riak/codecs/http.py
HttpCodec._parse_body
def _parse_body(self, robj, response, expected_statuses): """ Parse the body of an object response and populate the object. """ # If no response given, then return. if response is None: return None status, headers, data = response # Check if the server is down(status==0) if not status: m = 'Could not contact Riak Server: http://{0}:{1}!'.format( self._node.host, self._node.http_port) raise RiakError(m) # Make sure expected code came back self.check_http_code(status, expected_statuses) if 'x-riak-vclock' in headers: robj.vclock = VClock(headers['x-riak-vclock'], 'base64') # If 404(Not Found), then clear the object. if status == 404: robj.siblings = [] return None # If 201 Created, we need to extract the location and set the # key on the object. elif status == 201: robj.key = headers['location'].strip().split('/')[-1] # If 300(Siblings), apply the siblings to the object elif status == 300: ctype, params = parse_header(headers['content-type']) if ctype == 'multipart/mixed': if six.PY3: data = bytes_to_str(data) boundary = re.compile('\r?\n--%s(?:--)?\r?\n' % re.escape(params['boundary'])) parts = [message_from_string(p) for p in re.split(boundary, data)[1:-1]] robj.siblings = [self._parse_sibling(RiakContent(robj), part.items(), part.get_payload()) for part in parts] # Invoke sibling-resolution logic if robj.resolver is not None: robj.resolver(robj) return robj else: raise Exception('unexpected sibling response format: {0}'. format(ctype)) robj.siblings = [self._parse_sibling(RiakContent(robj), headers.items(), data)] return robj
python
def _parse_body(self, robj, response, expected_statuses): """ Parse the body of an object response and populate the object. """ # If no response given, then return. if response is None: return None status, headers, data = response # Check if the server is down(status==0) if not status: m = 'Could not contact Riak Server: http://{0}:{1}!'.format( self._node.host, self._node.http_port) raise RiakError(m) # Make sure expected code came back self.check_http_code(status, expected_statuses) if 'x-riak-vclock' in headers: robj.vclock = VClock(headers['x-riak-vclock'], 'base64') # If 404(Not Found), then clear the object. if status == 404: robj.siblings = [] return None # If 201 Created, we need to extract the location and set the # key on the object. elif status == 201: robj.key = headers['location'].strip().split('/')[-1] # If 300(Siblings), apply the siblings to the object elif status == 300: ctype, params = parse_header(headers['content-type']) if ctype == 'multipart/mixed': if six.PY3: data = bytes_to_str(data) boundary = re.compile('\r?\n--%s(?:--)?\r?\n' % re.escape(params['boundary'])) parts = [message_from_string(p) for p in re.split(boundary, data)[1:-1]] robj.siblings = [self._parse_sibling(RiakContent(robj), part.items(), part.get_payload()) for part in parts] # Invoke sibling-resolution logic if robj.resolver is not None: robj.resolver(robj) return robj else: raise Exception('unexpected sibling response format: {0}'. format(ctype)) robj.siblings = [self._parse_sibling(RiakContent(robj), headers.items(), data)] return robj
[ "def", "_parse_body", "(", "self", ",", "robj", ",", "response", ",", "expected_statuses", ")", ":", "# If no response given, then return.", "if", "response", "is", "None", ":", "return", "None", "status", ",", "headers", ",", "data", "=", "response", "# Check if the server is down(status==0)", "if", "not", "status", ":", "m", "=", "'Could not contact Riak Server: http://{0}:{1}!'", ".", "format", "(", "self", ".", "_node", ".", "host", ",", "self", ".", "_node", ".", "http_port", ")", "raise", "RiakError", "(", "m", ")", "# Make sure expected code came back", "self", ".", "check_http_code", "(", "status", ",", "expected_statuses", ")", "if", "'x-riak-vclock'", "in", "headers", ":", "robj", ".", "vclock", "=", "VClock", "(", "headers", "[", "'x-riak-vclock'", "]", ",", "'base64'", ")", "# If 404(Not Found), then clear the object.", "if", "status", "==", "404", ":", "robj", ".", "siblings", "=", "[", "]", "return", "None", "# If 201 Created, we need to extract the location and set the", "# key on the object.", "elif", "status", "==", "201", ":", "robj", ".", "key", "=", "headers", "[", "'location'", "]", ".", "strip", "(", ")", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", "# If 300(Siblings), apply the siblings to the object", "elif", "status", "==", "300", ":", "ctype", ",", "params", "=", "parse_header", "(", "headers", "[", "'content-type'", "]", ")", "if", "ctype", "==", "'multipart/mixed'", ":", "if", "six", ".", "PY3", ":", "data", "=", "bytes_to_str", "(", "data", ")", "boundary", "=", "re", ".", "compile", "(", "'\\r?\\n--%s(?:--)?\\r?\\n'", "%", "re", ".", "escape", "(", "params", "[", "'boundary'", "]", ")", ")", "parts", "=", "[", "message_from_string", "(", "p", ")", "for", "p", "in", "re", ".", "split", "(", "boundary", ",", "data", ")", "[", "1", ":", "-", "1", "]", "]", "robj", ".", "siblings", "=", "[", "self", ".", "_parse_sibling", "(", "RiakContent", "(", "robj", ")", ",", "part", ".", "items", "(", ")", ",", "part", ".", "get_payload", 
"(", ")", ")", "for", "part", "in", "parts", "]", "# Invoke sibling-resolution logic", "if", "robj", ".", "resolver", "is", "not", "None", ":", "robj", ".", "resolver", "(", "robj", ")", "return", "robj", "else", ":", "raise", "Exception", "(", "'unexpected sibling response format: {0}'", ".", "format", "(", "ctype", ")", ")", "robj", ".", "siblings", "=", "[", "self", ".", "_parse_sibling", "(", "RiakContent", "(", "robj", ")", ",", "headers", ".", "items", "(", ")", ",", "data", ")", "]", "return", "robj" ]
Parse the body of an object response and populate the object.
[ "Parse", "the", "body", "of", "an", "object", "response", "and", "populate", "the", "object", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/http.py#L46-L104
train
basho/riak-python-client
riak/codecs/http.py
HttpCodec._parse_sibling
def _parse_sibling(self, sibling, headers, data): """ Parses a single sibling out of a response. """ sibling.exists = True # Parse the headers... for header, value in headers: header = header.lower() if header == 'content-type': sibling.content_type, sibling.charset = \ self._parse_content_type(value) elif header == 'etag': sibling.etag = value elif header == 'link': sibling.links = self._parse_links(value) elif header == 'last-modified': sibling.last_modified = mktime_tz(parsedate_tz(value)) elif header.startswith('x-riak-meta-'): metakey = header.replace('x-riak-meta-', '') sibling.usermeta[metakey] = value elif header.startswith('x-riak-index-'): field = header.replace('x-riak-index-', '') reader = csv.reader([value], skipinitialspace=True) for line in reader: for token in line: token = decode_index_value(field, token) sibling.add_index(field, token) elif header == 'x-riak-deleted': sibling.exists = False sibling.encoded_data = data return sibling
python
def _parse_sibling(self, sibling, headers, data): """ Parses a single sibling out of a response. """ sibling.exists = True # Parse the headers... for header, value in headers: header = header.lower() if header == 'content-type': sibling.content_type, sibling.charset = \ self._parse_content_type(value) elif header == 'etag': sibling.etag = value elif header == 'link': sibling.links = self._parse_links(value) elif header == 'last-modified': sibling.last_modified = mktime_tz(parsedate_tz(value)) elif header.startswith('x-riak-meta-'): metakey = header.replace('x-riak-meta-', '') sibling.usermeta[metakey] = value elif header.startswith('x-riak-index-'): field = header.replace('x-riak-index-', '') reader = csv.reader([value], skipinitialspace=True) for line in reader: for token in line: token = decode_index_value(field, token) sibling.add_index(field, token) elif header == 'x-riak-deleted': sibling.exists = False sibling.encoded_data = data return sibling
[ "def", "_parse_sibling", "(", "self", ",", "sibling", ",", "headers", ",", "data", ")", ":", "sibling", ".", "exists", "=", "True", "# Parse the headers...", "for", "header", ",", "value", "in", "headers", ":", "header", "=", "header", ".", "lower", "(", ")", "if", "header", "==", "'content-type'", ":", "sibling", ".", "content_type", ",", "sibling", ".", "charset", "=", "self", ".", "_parse_content_type", "(", "value", ")", "elif", "header", "==", "'etag'", ":", "sibling", ".", "etag", "=", "value", "elif", "header", "==", "'link'", ":", "sibling", ".", "links", "=", "self", ".", "_parse_links", "(", "value", ")", "elif", "header", "==", "'last-modified'", ":", "sibling", ".", "last_modified", "=", "mktime_tz", "(", "parsedate_tz", "(", "value", ")", ")", "elif", "header", ".", "startswith", "(", "'x-riak-meta-'", ")", ":", "metakey", "=", "header", ".", "replace", "(", "'x-riak-meta-'", ",", "''", ")", "sibling", ".", "usermeta", "[", "metakey", "]", "=", "value", "elif", "header", ".", "startswith", "(", "'x-riak-index-'", ")", ":", "field", "=", "header", ".", "replace", "(", "'x-riak-index-'", ",", "''", ")", "reader", "=", "csv", ".", "reader", "(", "[", "value", "]", ",", "skipinitialspace", "=", "True", ")", "for", "line", "in", "reader", ":", "for", "token", "in", "line", ":", "token", "=", "decode_index_value", "(", "field", ",", "token", ")", "sibling", ".", "add_index", "(", "field", ",", "token", ")", "elif", "header", "==", "'x-riak-deleted'", ":", "sibling", ".", "exists", "=", "False", "sibling", ".", "encoded_data", "=", "data", "return", "sibling" ]
Parses a single sibling out of a response.
[ "Parses", "a", "single", "sibling", "out", "of", "a", "response", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/http.py#L106-L140
train
basho/riak-python-client
riak/codecs/http.py
HttpCodec._to_link_header
def _to_link_header(self, link): """ Convert the link tuple to a link header string. Used internally. """ try: bucket, key, tag = link except ValueError: raise RiakError("Invalid link tuple %s" % link) tag = tag if tag is not None else bucket url = self.object_path(bucket, key) header = '<%s>; riaktag="%s"' % (url, tag) return header
python
def _to_link_header(self, link): """ Convert the link tuple to a link header string. Used internally. """ try: bucket, key, tag = link except ValueError: raise RiakError("Invalid link tuple %s" % link) tag = tag if tag is not None else bucket url = self.object_path(bucket, key) header = '<%s>; riaktag="%s"' % (url, tag) return header
[ "def", "_to_link_header", "(", "self", ",", "link", ")", ":", "try", ":", "bucket", ",", "key", ",", "tag", "=", "link", "except", "ValueError", ":", "raise", "RiakError", "(", "\"Invalid link tuple %s\"", "%", "link", ")", "tag", "=", "tag", "if", "tag", "is", "not", "None", "else", "bucket", "url", "=", "self", ".", "object_path", "(", "bucket", ",", "key", ")", "header", "=", "'<%s>; riaktag=\"%s\"'", "%", "(", "url", ",", "tag", ")", "return", "header" ]
Convert the link tuple to a link header string. Used internally.
[ "Convert", "the", "link", "tuple", "to", "a", "link", "header", "string", ".", "Used", "internally", "." ]
91de13a16607cdf553d1a194e762734e3bec4231
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/http.py#L142-L153
train