repo | path | func_name | code | language | docstring | sha | url | partition |
---|---|---|---|---|---|---|---|---|
stanfordnlp/stanza | stanza/nlp/corenlp.py | CoreNLPClient.annotate_json | def annotate_json(self, text, annotators=None):
"""Return a JSON dict from the CoreNLP server, containing annotations of the text.
:param (str) text: Text to annotate.
:param (list[str]) annotators: a list of annotator names
:return (dict): a dict of annotations
"""
# WARN(chaganty): I'd like to deprecate this function -- we
# should just use annotate().json
#properties = {
# 'annotators': ','.join(annotators or self.default_annotators),
# 'outputFormat': 'json',
#}
#return self._request(text, properties).json(strict=False)
doc = self.annotate(text, annotators)
return doc.json | python | Return a JSON dict from the CoreNLP server, containing annotations of the text.
:param (str) text: Text to annotate.
:param (list[str]) annotators: a list of annotator names
:return (dict): a dict of annotations | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/nlp/corenlp.py#L79-L96 | train |
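A minimal usage sketch for `annotate_json`, assuming a CoreNLP server is already running; the constructor argument name (`server`) and the JSON result layout are assumptions based on the docstring, not verified API:

```python
# Hypothetical usage sketch -- assumes a running CoreNLP server and that
# CoreNLPClient takes its URL as `server`; both are assumptions.
from stanza.nlp.corenlp import CoreNLPClient

client = CoreNLPClient(server='http://localhost:9000')
ann = client.annotate_json('Chris wrote a simple sentence.',
                           annotators=['tokenize', 'ssplit', 'pos'])
print(ann['sentences'][0]['tokens'][0]['word'])  # CoreNLP JSON layout, assumed
```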
stanfordnlp/stanza | stanza/nlp/corenlp.py | CoreNLPClient.annotate_proto | def annotate_proto(self, text, annotators=None):
"""Return a Document protocol buffer from the CoreNLP server, containing annotations of the text.
:param (str) text: text to be annotated
:param (list[str]) annotators: a list of annotator names
:return (CoreNLP_pb2.Document): a Document protocol buffer
"""
properties = {
'annotators': ','.join(annotators or self.default_annotators),
'outputFormat': 'serialized',
'serializer': 'edu.stanford.nlp.pipeline.ProtobufAnnotationSerializer'
}
r = self._request(text, properties)
buffer = r.content # bytes
size, pos = _DecodeVarint(buffer, 0)
buffer = buffer[pos:(pos + size)]
doc = CoreNLP_pb2.Document()
doc.ParseFromString(buffer)
return doc | python | Return a Document protocol buffer from the CoreNLP server, containing annotations of the text.
:param (str) text: text to be annotated
:param (list[str]) annotators: a list of annotator names
:return (CoreNLP_pb2.Document): a Document protocol buffer | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/nlp/corenlp.py#L98-L118 | train |
stanfordnlp/stanza | stanza/nlp/corenlp.py | CoreNLPClient.annotate | def annotate(self, text, annotators=None):
"""Return an AnnotatedDocument from the CoreNLP server.
:param (str) text: text to be annotated
:param (list[str]) annotators: a list of annotator names
See a list of valid annotator names here:
http://stanfordnlp.github.io/CoreNLP/annotators.html
:return (AnnotatedDocument): an annotated document
"""
doc_pb = self.annotate_proto(text, annotators)
return AnnotatedDocument.from_pb(doc_pb) | python | Return an AnnotatedDocument from the CoreNLP server.
:param (str) text: text to be annotated
:param (list[str]) annotators: a list of annotator names
See a list of valid annotator names here:
http://stanfordnlp.github.io/CoreNLP/annotators.html
:return (AnnotatedDocument): an annotated document | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/nlp/corenlp.py#L120-L132 | train |
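Taken together, the three client methods layer on one another: `annotate_json` calls `annotate`, which calls `annotate_proto`, which issues the HTTP request and decodes the length-prefixed protobuf. Continuing the client sketch above:

```python
# Same text through each layer of the chain (client from the sketch above):
text = 'Stanford is in California.'
pb = client.annotate_proto(text)    # raw CoreNLP_pb2.Document
doc = client.annotate(text)         # AnnotatedDocument wrapper around that proto
d = client.annotate_json(text)      # plain dict, via doc.json
```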
stanfordnlp/stanza | stanza/nlp/corenlp.py | ProtobufBacked.from_pb | def from_pb(cls, pb):
"""Instantiate the object from a protocol buffer.
Args:
pb (protobuf)
Save a reference to the protocol buffer on the object.
"""
obj = cls._from_pb(pb)
obj._pb = pb
return obj | python | Instantiate the object from a protocol buffer.
Args:
pb (protobuf)
Save a reference to the protocol buffer on the object. | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/nlp/corenlp.py#L157-L167 | train |
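`from_pb` is the template-method half of a small pattern: the subclass supplies `_from_pb`, and the base class stores the raw proto for later reuse. A self-contained re-implementation of the pattern (the `Token` subclass and its `word` field are invented for illustration):

```python
class ProtobufBacked(object):
    @classmethod
    def from_pb(cls, pb):
        obj = cls._from_pb(pb)  # subclass-specific construction
        obj._pb = pb            # keep the raw proto around
        return obj

class Token(ProtobufBacked):    # illustrative subclass, not from the source
    def __init__(self, word):
        self.word = word

    @classmethod
    def _from_pb(cls, pb):
        return cls(pb.word)     # assumes the proto exposes a `word` field
```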
stanfordnlp/stanza | stanza/nlp/corenlp.py | AnnotatedEntity.character_span | def character_span(self):
"""
Returns the character span of the token
"""
begin, end = self.token_span
return (self.sentence[begin].character_span[0], self.sentence[end-1].character_span[-1]) | python | Returns the character span of the token | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/nlp/corenlp.py#L810-L815 | train |
stanfordnlp/stanza | stanza/research/summary_basic.py | TensorBoardLogger.log_proto | def log_proto(self, proto, step_num):
"""Log a Summary protobuf to the event file.
:param proto: a Summary protobuf
:param step_num: the iteration number at which this value was logged
"""
self.summ_writer.add_summary(proto, step_num)
return proto | python | Log a Summary protobuf to the event file.
:param proto: a Summary protobuf
:param step_num: the iteration number at which this value was logged | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/research/summary_basic.py#L23-L30 | train |
stanfordnlp/stanza | stanza/research/summary_basic.py | TensorBoardLogger.log | def log(self, key, val, step_num):
"""Directly log a scalar value to the event file.
:param string key: a name for the value
:param val: a float
:param step_num: the iteration number at which this value was logged
"""
try:
ph, summ = self.summaries[key]
except KeyError:
# if we haven't defined a variable for this key, define one
with self.g.as_default():
ph = tf.placeholder(tf.float32, (), name=key) # scalar
summ = tf.scalar_summary(key, ph)
self.summaries[key] = (ph, summ)
summary_str = self.sess.run(summ, {ph: val})
self.summ_writer.add_summary(summary_str, step_num)
return val | python | Directly log a scalar value to the event file.
:param string key: a name for the value
:param val: a float
:param step_num: the iteration number at which this value was logged | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/research/summary_basic.py#L32-L50 | train |
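`log` lazily builds one placeholder/summary pair per key and reuses it on later calls. A usage sketch, noting that `tf.scalar_summary` is the pre-1.0 TensorFlow API (replaced by `tf.summary.scalar`), and that the constructor argument (a log directory) is an assumption:

```python
logger = TensorBoardLogger('runs/exp1')   # constructor argument is an assumption
for step, loss in enumerate([0.9, 0.5, 0.3]):
    logger.log('train_loss', loss, step)  # first call defines the summary op; later calls reuse it
```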
stanfordnlp/stanza | stanza/monitoring/summary.py | read_events | def read_events(stream):
'''
Read and return as a generator a sequence of Event protos from
file-like object `stream`.
'''
header_size = struct.calcsize('<QI')
len_size = struct.calcsize('<Q')
footer_size = struct.calcsize('<I')
while True:
header = stream.read(header_size)
if len(header) == 0:
break
elif len(header) < header_size:
raise SummaryReaderException('unexpected EOF (expected a %d-byte header, '
'got %d bytes)' % (header_size, len(header)))
data_len, len_crc = struct.unpack('<QI', header)
len_crc_actual = masked_crc(header[:len_size])
if len_crc_actual != len_crc:
raise SummaryReaderException('incorrect length CRC (%d != %d)' %
(len_crc_actual, len_crc))
data = stream.read(data_len)
if len(data) < data_len:
raise SummaryReaderException('unexpected EOF (expected %d bytes, got %d)' %
(data_len, len(data)))
yield Event.FromString(data)
footer = stream.read(footer_size)
if len(footer) < footer_size:
raise SummaryReaderException('unexpected EOF (expected a %d-byte footer, '
'got %d bytes)' % (footer_size, len(footer)))
data_crc, = struct.unpack('<I', footer)
data_crc_actual = masked_crc(data)
if data_crc_actual != data_crc:
raise SummaryReaderException('incorrect data CRC (%d != %d)' %
(data_crc_actual, data_crc)) | python | Read and return as a generator a sequence of Event protos from
file-like object `stream`. | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/monitoring/summary.py#L286-L322 | train |
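The framing `read_events` parses is the TFRecord record format: a little-endian `uint64` payload length, a masked CRC of that length field, the payload, then a masked CRC of the payload. A sketch that counts the events in an existing TensorBoard log (the filename is illustrative):

```python
with open('events.out.tfevents.1462000000.myhost', 'rb') as f:
    num_events = sum(1 for _ in read_events(f))  # consumes the generator
print(num_events)
```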
stanfordnlp/stanza | stanza/monitoring/summary.py | write_events | def write_events(stream, events):
'''
Write a sequence of Event protos to file-like object `stream`.
'''
for event in events:
data = event.SerializeToString()
len_field = struct.pack('<Q', len(data))
len_crc = struct.pack('<I', masked_crc(len_field))
data_crc = struct.pack('<I', masked_crc(data))
stream.write(len_field)
stream.write(len_crc)
stream.write(data)
stream.write(data_crc) | python | Write a sequence of Event protos to file-like object `stream`. | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/monitoring/summary.py#L325-L337 | train |
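`write_events` emits exactly the framing `read_events` consumes, so the two round-trip. A sketch with an in-memory buffer; the `Event` import path is an assumption about where the module gets its proto class:

```python
import io
from tensorflow.core.util.event_pb2 import Event  # assumed import path

buf = io.BytesIO()
write_events(buf, [Event(wall_time=0.0, step=1)])
buf.seek(0)
assert list(read_events(buf))[0].step == 1
```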
stanfordnlp/stanza | stanza/monitoring/summary.py | SummaryWriter.log_image | def log_image(self, step, tag, val):
'''
Write an image event.
:param int step: Time step (x-axis in TensorBoard graphs)
:param str tag: Label for this value
:param numpy.ndarray val: Image in RGB format with values from
0 to 255; a 3-D array with index order (row, column, channel).
`val.shape[-1] == 3`
'''
# TODO: support floating-point tensors, 4-D tensors, grayscale
if len(val.shape) != 3:
raise ValueError('`log_image` value should be a 3-D tensor, instead got shape %s' %
(val.shape,))
if val.shape[2] != 3:
raise ValueError('Last dimension of `log_image` value should be 3 (RGB), '
'instead got shape %s' %
(val.shape,))
fakefile = StringIO()
png.Writer(size=(val.shape[1], val.shape[0])).write(
fakefile, val.reshape(val.shape[0], val.shape[1] * val.shape[2]))
encoded = fakefile.getvalue()
# https://tensorflow.googlesource.com/tensorflow/+/master/tensorflow/core/framework/summary.proto
RGB = 3
image = Summary.Image(height=val.shape[0], width=val.shape[1],
colorspace=RGB, encoded_image_string=encoded)
summary = Summary(value=[Summary.Value(tag=tag, image=image)])
self._add_event(step, summary) | python | Write an image event.
:param int step: Time step (x-axis in TensorBoard graphs)
:param str tag: Label for this value
:param numpy.ndarray val: Image in RGB format with values from
0 to 255; a 3-D array with index order (row, column, channel).
`val.shape[-1] == 3` | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/monitoring/summary.py#L106-L133 | train |
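A sketch of logging one image, assuming the `SummaryWriter` constructor takes an output events-file path (its signature is not shown in this excerpt); the input must be a `(rows, cols, 3)` array of 0-255 values per the validation above:

```python
import numpy as np

writer = SummaryWriter('events.out.tfevents.demo')  # constructor arg is an assumption
img = np.random.randint(0, 256, size=(4, 4, 3))     # 4x4 RGB image, values 0-255
writer.log_image(0, 'sample_image', img)
```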
stanfordnlp/stanza | stanza/monitoring/summary.py | SummaryWriter.log_scalar | def log_scalar(self, step, tag, val):
'''
Write a scalar event.
:param int step: Time step (x-axis in TensorBoard graphs)
:param str tag: Label for this value
:param float val: Scalar to graph at this time step (y-axis)
'''
summary = Summary(value=[Summary.Value(tag=tag, simple_value=float(np.float32(val)))])
self._add_event(step, summary) | python | Write a scalar event.
:param int step: Time step (x-axis in TensorBoard graphs)
:param str tag: Label for this value
:param float val: Scalar to graph at this time step (y-axis) | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/monitoring/summary.py#L135-L144 | train |
stanfordnlp/stanza | stanza/monitoring/summary.py | SummaryWriter.log_histogram | def log_histogram(self, step, tag, val):
'''
Write a histogram event.
:param int step: Time step (x-axis in TensorBoard graphs)
:param str tag: Label for this value
:param numpy.ndarray val: Arbitrary-dimensional array containing
values to be aggregated in the resulting histogram.
'''
hist = Histogram()
hist.add(val)
summary = Summary(value=[Summary.Value(tag=tag, histo=hist.encode_to_proto())])
self._add_event(step, summary) | python | Write a histogram event.
:param int step: Time step (x-axis in TensorBoard graphs)
:param str tag: Label for this value
:param numpy.ndarray val: Arbitrary-dimensional array containing
values to be aggregated in the resulting histogram. | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/monitoring/summary.py#L146-L158 | train |
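The three `log_*` methods write to the same event stream, keyed by step. Continuing the `writer` sketch above:

```python
import numpy as np

writer.log_scalar(5, 'loss', 0.42)
writer.log_histogram(5, 'weights', np.random.randn(1000))  # aggregated by the module's Histogram class
```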
stanfordnlp/stanza | stanza/research/config.py | options | def options(allow_partial=False, read=False):
'''
Get the object containing the values of the parsed command line options.
:param bool allow_partial: If `True`, ignore unrecognized arguments and allow
the options to be re-parsed next time `options` is called. This
also suppresses overwrite checking (the check is performed the first
time `options` is called with `allow_partial=False`).
:param bool read: If `True`, do not create or overwrite a `config.json`
file, and do not check whether such file already exists. Use for scripts
that read from the run directory rather than/in addition to writing to it.
:return argparse.Namespace: An object storing the values of the options specified
to the parser returned by `get_options_parser()`.
'''
global _options
if allow_partial:
opts, extras = _options_parser.parse_known_args()
if opts.run_dir:
mkdirp(opts.run_dir)
return opts
if _options is None:
# Add back in the help option (only show help and quit once arguments are finalized)
_options_parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS,
help='show this help message and exit')
_options = _options_parser.parse_args()
if _options.run_dir:
mkdirp(_options.run_dir, overwrite=_options.overwrite or read)
if not read:
options_dump = vars(_options)
# People should be able to rerun an experiment with -C config.json safely.
# Don't include the overwrite option, since using a config from an experiment
# done with -O should still require passing -O for it to be overwritten again.
del options_dump['overwrite']
# And don't write the name of the other config file in this new one! It's
# probably harmless (config file interpretation can't be chained with the
# config option), but still confusing.
del options_dump['config']
dump_pretty(options_dump, 'config.json')
return _options | python | Get the object containing the values of the parsed command line options.
:param bool allow_partial: If `True`, ignore unrecognized arguments and allow
the options to be re-parsed next time `options` is called. This
also suppresses overwrite checking (the check is performed the first
time `options` is called with `allow_partial=False`).
:param bool read: If `True`, do not create or overwrite a `config.json`
file, and do not check whether such file already exists. Use for scripts
that read from the run directory rather than/in addition to writing to it.
:return argparse.Namespace: An object storing the values of the options specified
to the parser returned by `get_options_parser()`. | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/research/config.py#L88-L130 | train |
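A sketch of the intended flow: options are registered on the shared parser, then `options()` parses `argv` once, creates the run directory, and dumps a rerunnable `config.json`. That `get_options_parser()` returns an `argparse`-style parser is an assumption consistent with the docstring:

```python
from stanza.research import config

parser = config.get_options_parser()
parser.add_argument('--learning_rate', type=float, default=0.1)

opts = config.options()   # parses sys.argv; writes config.json under --run_dir
print(opts.learning_rate)
```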
stanfordnlp/stanza | stanza/ml/embeddings.py | Embeddings.inner_products | def inner_products(self, vec):
"""Get the inner product of a vector with every embedding.
:param (np.array) vector: the query vector
:return (list[tuple[str, float]]): a map of embeddings to inner products
"""
products = self.array.dot(vec)
return self._word_to_score(np.arange(len(products)), products) | python | Get the inner product of a vector with every embedding.
:param (np.array) vector: the query vector
:return (list[tuple[str, float]]): a map of embeddings to inner products | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/ml/embeddings.py#L50-L58 | train |
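What `inner_products` computes, isolated from the class: one matrix-vector product, then scores keyed by word. A plain list stands in for the `Vocab` object here:

```python
import numpy as np

array = np.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])  # one embedding per row
words = ['cat', 'dog', 'pet']                           # stand-in for Vocab
vec = np.array([1.0, 0.5])
scores = dict(zip(words, array.dot(vec)))  # {'cat': 1.0, 'dog': 0.5, 'pet': 1.5}
```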
stanfordnlp/stanza | stanza/ml/embeddings.py | Embeddings._word_to_score | def _word_to_score(self, ids, scores):
"""Return a map from each word to its score.
:param (np.array) ids: a vector of word ids
:param (np.array) scores: a vector of scores
:return (dict[unicode, float]): a map from each word (unicode) to its score (float)
"""
# should be 1-D vectors
assert len(ids.shape) == 1
assert ids.shape == scores.shape
w2s = {}
for i in range(len(ids)):
w2s[self.vocab.index2word(ids[i])] = scores[i]
return w2s | python | Return a map from each word to its score.
:param (np.array) ids: a vector of word ids
:param (np.array) scores: a vector of scores
:return (dict[unicode, float]): a map from each word (unicode) to its score (float) | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/ml/embeddings.py#L60-L75 | train |
stanfordnlp/stanza | stanza/ml/embeddings.py | Embeddings._init_lsh_forest | def _init_lsh_forest(self):
"""Construct an LSH forest for nearest neighbor search."""
import sklearn.neighbors
lshf = sklearn.neighbors.LSHForest()
lshf.fit(self.array)
return lshf | python | Construct an LSH forest for nearest neighbor search. | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/ml/embeddings.py#L88-L93 | train |
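Note that `sklearn.neighbors.LSHForest` was deprecated in scikit-learn 0.19 and later removed, so `_init_lsh_forest` only runs against old scikit-learn releases. A rough modern stand-in for the same nearest-neighbor role (exact rather than approximate search):

```python
import numpy as np
from sklearn.neighbors import NearestNeighbors

array = np.random.randn(100, 16)                 # stand-in embedding matrix
nn = NearestNeighbors(n_neighbors=5).fit(array)
distances, indices = nn.kneighbors(array[:1])    # neighbors of the first row
```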
stanfordnlp/stanza | stanza/ml/embeddings.py | Embeddings.to_dict | def to_dict(self):
"""Convert to dictionary.
:return (dict): A dict mapping from strings to vectors.
"""
d = {}
for word, idx in self.vocab.iteritems():
d[word] = self.array[idx].tolist()
return d | python | Convert to dictionary.
:return (dict): A dict mapping from strings to vectors. | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/ml/embeddings.py#L113-L121 | train |
stanfordnlp/stanza | stanza/ml/embeddings.py | Embeddings.to_files | def to_files(self, array_file, vocab_file):
"""Write the embedding matrix and the vocab to files.
:param (file) array_file: file to write array to
:param (file) vocab_file: file to write vocab to
"""
logging.info('Writing array...')
np.save(array_file, self.array)
logging.info('Writing vocab...')
self.vocab.to_file(vocab_file) | python | Write the embedding matrix and the vocab to files.
:param (file) array_file: file to write array to
:param (file) vocab_file: file to write vocab to | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/ml/embeddings.py#L136-L145 | train |
stanfordnlp/stanza | stanza/ml/embeddings.py | Embeddings.from_files | def from_files(cls, array_file, vocab_file):
"""Load the embedding matrix and the vocab from files.
:param (file) array_file: file to read array from
:param (file) vocab_file: file to read vocab from
:return (Embeddings): an Embeddings object
"""
logging.info('Loading array...')
array = np.load(array_file)
logging.info('Loading vocab...')
vocab = Vocab.from_file(vocab_file)
return cls(array, vocab) | python | Load the embedding matrix and the vocab from files.
:param (file) array_file: file to read array from
:param (file) vocab_file: file to read vocab from
:return (Embeddings): an Embeddings object | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/ml/embeddings.py#L148-L160 | train |
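A round-trip sketch for `to_files`/`from_files`, assuming an existing `Embeddings` instance `emb`; binary mode for the array file matches `np.save`/`np.load`, while the vocab file's mode is an assumption:

```python
with open('emb.npy', 'wb') as af, open('vocab.txt', 'wb') as vf:
    emb.to_files(af, vf)

with open('emb.npy', 'rb') as af, open('vocab.txt', 'rb') as vf:
    emb2 = Embeddings.from_files(af, vf)
```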
stanfordnlp/stanza | stanza/research/codalab.py | get_uuids | def get_uuids():
"""List all bundle UUIDs in the worksheet."""
result = shell('cl ls -w {} -u'.format(worksheet))
uuids = result.split('\n')
uuids = uuids[1:-1] # trim non uuids
return uuids | python | List all bundle UUIDs in the worksheet. | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/research/codalab.py#L48-L53 | train |
stanfordnlp/stanza | stanza/research/codalab.py | open_file | def open_file(uuid, path):
"""Get the raw file content within a particular bundle at a particular path.
Path have no leading slash.
"""
# create temporary file just so we can get an unused file path
f = tempfile.NamedTemporaryFile()
f.close() # close and delete right away
fname = f.name
# download file to temporary path
cmd ='cl down -o {} -w {} {}/{}'.format(fname, worksheet, uuid, path)
try:
shell(cmd)
except RuntimeError:
try:
os.remove(fname) # if file exists, remove it
except OSError:
pass
raise IOError('Failed to open file {}/{}'.format(uuid, path))
f = open(fname)
yield f
f.close()
os.remove(fname) | python | Get the raw file content within a particular bundle at a particular path.
Path have no leading slash. | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/research/codalab.py#L57-L81 | train |
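The `yield` in the middle of `open_file` marks it as a generator-based context manager (presumably decorated with `contextlib.contextmanager` in the full module), so it is used in a `with` block; the UUID and path below are illustrative:

```python
with open_file('0x1a2b3c', 'stdout') as f:  # illustrative uuid/path
    contents = f.read()
```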
stanfordnlp/stanza | stanza/research/codalab.py | Bundle.load_img | def load_img(self, img_path):
"""
Return an image object that can be immediately plotted with matplotlib
"""
with open_file(self.uuid, img_path) as f:
return mpimg.imread(f) | python | Return an image object that can be immediately plotted with matplotlib | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/research/codalab.py#L126-L131 | train |
stanfordnlp/stanza | stanza/research/output.py | output_results | def output_results(results, split_id='results', output_stream=None):
'''
Log `results` readably to `output_stream`, with a header
containing `split_id`.
:param results: a dictionary of summary statistics from an evaluation
:type results: dict(str -> object)
:param str split_id: an identifier for the source of `results` (e.g. 'dev')
:param file output_stream: the file-like object to which to log the results
(default: stdout)
:type split_id: str
'''
if output_stream is None:
output_stream = sys.stdout
output_stream.write('----- %s -----\n' % split_id)
for name in sorted(results.keys()):
output_stream.write('%s: %s\n' % (name, repr(results[name])))
output_stream.flush() | python | Log `results` readably to `output_stream`, with a header
containing `split_id`.
:param results: a dictionary of summary statistics from an evaluation
:type results: dict(str -> object)
:param str split_id: an identifier for the source of `results` (e.g. 'dev')
:param file output_stream: the file-like object to which to log the results
(default: stdout)
:type split_id: str | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/research/output.py#L4-L25 | train |
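A usage sketch; keys are printed in sorted order, so the output is deterministic:

```python
output_results({'accuracy': 0.91, 'loss': 0.34}, split_id='dev')
# ----- dev -----
# accuracy: 0.91
# loss: 0.34
```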
stanfordnlp/stanza | stanza/ml/tensorflow_utils.py | labels_to_onehots | def labels_to_onehots(labels, num_classes):
"""Convert a vector of integer class labels to a matrix of one-hot target vectors.
:param labels: a vector of integer labels, 0 to num_classes. Has shape (batch_size,).
:param num_classes: the total number of classes
:return: has shape (batch_size, num_classes)
"""
batch_size = labels.get_shape().as_list()[0]
with tf.name_scope("one_hot"):
labels = tf.expand_dims(labels, 1)
indices = tf.expand_dims(tf.range(0, batch_size, 1), 1)
sparse_ptrs = tf.concat(1, [indices, labels], name="ptrs")
onehots = tf.sparse_to_dense(sparse_ptrs, [batch_size, num_classes],
1.0, 0.0)
return onehots | python | Convert a vector of integer class labels to a matrix of one-hot target vectors.
:param labels: a vector of integer labels, 0 to num_classes. Has shape (batch_size,).
:param num_classes: the total number of classes
:return: has shape (batch_size, num_classes) | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/ml/tensorflow_utils.py#L6-L21 | train |
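The snippet uses the pre-1.0 TensorFlow API (`tf.concat(1, ...)` argument order, `tf.sparse_to_dense`); in later TensorFlow the whole function reduces to `tf.one_hot(labels, num_classes)`. The computation itself, shown with NumPy:

```python
import numpy as np

labels = np.array([2, 0, 1])
num_classes = 4
onehots = np.zeros((len(labels), num_classes), dtype=np.float32)
onehots[np.arange(len(labels)), labels] = 1.0  # one 1.0 per row, at the label's column
# [[0. 0. 1. 0.]
#  [1. 0. 0. 0.]
#  [0. 1. 0. 0.]]
```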
stanfordnlp/stanza | stanza/monitoring/progress.py | ProgressMonitor.start_task | def start_task(self, name, size):
'''
Add a task to the stack. If, for example, `name` is `'Iteration'` and
`size` is 10, progress on that task will be shown as
..., Iteration <p> of 10, ...
:param str name: A descriptive name for the type of subtask that is
being completed.
:param int size: The total number of subtasks to complete.
'''
if len(self.task_stack) == 0:
self.start_time = datetime.datetime.now()
self.task_stack.append(Task(name, size, 0)) | python | Add a task to the stack. If, for example, `name` is `'Iteration'` and
`size` is 10, progress on that task will be shown as
..., Iteration <p> of 10, ...
:param str name: A descriptive name for the type of subtask that is
being completed.
:param int size: The total number of subtasks to complete. | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/monitoring/progress.py#L58-L71 | train |
stanfordnlp/stanza | stanza/monitoring/progress.py | ProgressMonitor.progress | def progress(self, p):
'''
Update the current progress on the task at the top of the stack.
:param int p: The current subtask number, between 0 and `size`
(passed to `start_task`), inclusive.
'''
self.task_stack[-1] = self.task_stack[-1]._replace(progress=p)
self.progress_report() | python | Update the current progress on the task at the top of the stack.
:param int p: The current subtask number, between 0 and `size`
(passed to `start_task`), inclusive. | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/monitoring/progress.py#L73-L81 | train |
stanfordnlp/stanza | stanza/monitoring/progress.py | ProgressMonitor.end_task | def end_task(self):
'''
Remove the current task from the stack.
'''
self.progress(self.task_stack[-1].size)
self.task_stack.pop() | python | Remove the current task from the stack. | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/monitoring/progress.py#L83-L88 | train |
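A usage sketch for the task stack (`ProgressMonitor()` with no arguments is an assumption); nested `start_task`/`end_task` calls produce reports like `Epoch 1 of 3, Batch 40 of 100 (~...% done, ETA ...)`:

```python
monitor = ProgressMonitor()          # no-arg constructor is an assumption
monitor.start_task('Epoch', 3)
for epoch in range(3):
    monitor.progress(epoch)
    monitor.start_task('Batch', 100)
    for batch in range(100):
        monitor.progress(batch)
    monitor.end_task()
monitor.end_task()
```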
stanfordnlp/stanza | stanza/monitoring/progress.py | ProgressMonitor.progress_report | def progress_report(self, force=False):
'''
Print the current progress.
:param bool force: If `True`, print the report regardless of the
elapsed time since the last progress report.
'''
now = datetime.datetime.now()
if (len(self.task_stack) > 1 or self.task_stack[0] > 0) and \
now - self.last_report < self.resolution and not force:
return
stack_printout = ', '.join('%s %s of %s' % (t.name, t.progress, t.size)
for t in self.task_stack)
frac_done = self.fraction_done()
if frac_done == 0.0:
now_str = now.strftime('%c')
eta_str = 'unknown on %s' % now_str
else:
elapsed = (now - self.start_time)
estimated_length = elapsed.total_seconds() / frac_done
eta = self.start_time + datetime.timedelta(seconds=estimated_length)
eta_str = eta.strftime('%c')
print '%s (~%d%% done, ETA %s)' % (stack_printout,
round(frac_done * 100.0),
eta_str)
self.last_report = datetime.datetime.now() | python | def progress_report(self, force=False):
'''
Print the current progress.
:param bool force: If `True`, print the report regardless of the
elapsed time since the last progress report.
'''
now = datetime.datetime.now()
if (len(self.task_stack) > 1 or self.task_stack[0] > 0) and \
now - self.last_report < self.resolution and not force:
return
stack_printout = ', '.join('%s %s of %s' % (t.name, t.progress, t.size)
for t in self.task_stack)
frac_done = self.fraction_done()
if frac_done == 0.0:
now_str = now.strftime('%c')
eta_str = 'unknown on %s' % now_str
else:
elapsed = (now - self.start_time)
estimated_length = elapsed.total_seconds() / frac_done
eta = self.start_time + datetime.timedelta(seconds=estimated_length)
eta_str = eta.strftime('%c')
print '%s (~%d%% done, ETA %s)' % (stack_printout,
round(frac_done * 100.0),
eta_str)
self.last_report = datetime.datetime.now() | [
"def",
"progress_report",
"(",
"self",
",",
"force",
"=",
"False",
")",
":",
"now",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"if",
"(",
"len",
"(",
"self",
".",
"task_stack",
")",
">",
"1",
"or",
"self",
".",
"task_stack",
"[",
"0",
"]",
">",
"0",
")",
"and",
"now",
"-",
"self",
".",
"last_report",
"<",
"self",
".",
"resolution",
"and",
"not",
"force",
":",
"return",
"stack_printout",
"=",
"', '",
".",
"join",
"(",
"'%s %s of %s'",
"%",
"(",
"t",
".",
"name",
",",
"t",
".",
"progress",
",",
"t",
".",
"size",
")",
"for",
"t",
"in",
"self",
".",
"task_stack",
")",
"frac_done",
"=",
"self",
".",
"fraction_done",
"(",
")",
"if",
"frac_done",
"==",
"0.0",
":",
"now_str",
"=",
"now",
".",
"strftime",
"(",
"'%c'",
")",
"eta_str",
"=",
"'unknown on %s'",
"%",
"now_str",
"else",
":",
"elapsed",
"=",
"(",
"now",
"-",
"self",
".",
"start_time",
")",
"estimated_length",
"=",
"elapsed",
".",
"total_seconds",
"(",
")",
"/",
"frac_done",
"eta",
"=",
"self",
".",
"start_time",
"+",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"estimated_length",
")",
"eta_str",
"=",
"eta",
".",
"strftime",
"(",
"'%c'",
")",
"print",
"'%s (~%d%% done, ETA %s)'",
"%",
"(",
"stack_printout",
",",
"round",
"(",
"frac_done",
"*",
"100.0",
")",
",",
"eta_str",
")",
"self",
".",
"last_report",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")"
] | Print the current progress.
:param bool force: If `True`, print the report regardless of the
elapsed time since the last progress report. | [
"Print",
"the",
"current",
"progress",
"."
] | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/monitoring/progress.py#L90-L118 | train |
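The ETA printed by ``progress_report`` is simple proportional extrapolation: the estimated total run time is the elapsed time divided by the fraction done. A self-contained sketch of that arithmetic:

    import datetime

    start = datetime.datetime(2020, 1, 1, 12, 0, 0)
    elapsed = datetime.timedelta(seconds=30)
    frac_done = 0.25
    estimated_length = elapsed.total_seconds() / frac_done   # 120.0 seconds in total
    eta = start + datetime.timedelta(seconds=estimated_length)
    print(eta)  # 2020-01-01 12:02:00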
stanfordnlp/stanza | stanza/text/dataset.py | Dataset.write_conll | def write_conll(self, fname):
"""
Serializes the dataset in CONLL format to fname
"""
if 'label' not in self.fields:
raise InvalidFieldsException("dataset is not in CONLL format: missing label field")
def instance_to_conll(inst):
tab = [v for k, v in inst.items() if k != 'label']
return '{}\n{}'.format(inst['label'], '\n'.join(['\t'.join(['-' if e is None else str(e) for e in row]) for row in zip(*tab)]))
with open(fname, 'wb') as f:
f.write('# {}'.format('\t'.join([k for k in self.fields if k != 'label'])))
for i, d in enumerate(self):
f.write('\n{}'.format(instance_to_conll(d)))
if i != len(self) - 1:
f.write('\n') | python | def write_conll(self, fname):
"""
Serializes the dataset in CONLL format to fname
"""
if 'label' not in self.fields:
raise InvalidFieldsException("dataset is not in CONLL format: missing label field")
def instance_to_conll(inst):
tab = [v for k, v in inst.items() if k != 'label']
return '{}\n{}'.format(inst['label'], '\n'.join(['\t'.join(['-' if e is None else str(e) for e in row]) for row in zip(*tab)]))
with open(fname, 'wb') as f:
f.write('# {}'.format('\t'.join([k for k in self.fields if k != 'label'])))
for i, d in enumerate(self):
f.write('\n{}'.format(instance_to_conll(d)))
if i != len(self) - 1:
f.write('\n') | [
"def",
"write_conll",
"(",
"self",
",",
"fname",
")",
":",
"if",
"'label'",
"not",
"in",
"self",
".",
"fields",
":",
"raise",
"InvalidFieldsException",
"(",
"\"dataset is not in CONLL format: missing label field\"",
")",
"def",
"instance_to_conll",
"(",
"inst",
")",
":",
"tab",
"=",
"[",
"v",
"for",
"k",
",",
"v",
"in",
"inst",
".",
"items",
"(",
")",
"if",
"k",
"!=",
"'label'",
"]",
"return",
"'{}\\n{}'",
".",
"format",
"(",
"inst",
"[",
"'label'",
"]",
",",
"'\\n'",
".",
"join",
"(",
"[",
"'\\t'",
".",
"join",
"(",
"[",
"'-'",
"if",
"e",
"is",
"None",
"else",
"str",
"(",
"e",
")",
"for",
"e",
"in",
"row",
"]",
")",
"for",
"row",
"in",
"zip",
"(",
"*",
"tab",
")",
"]",
")",
")",
"with",
"open",
"(",
"fname",
",",
"'wb'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"'# {}'",
".",
"format",
"(",
"'\\t'",
".",
"join",
"(",
"[",
"k",
"for",
"k",
"in",
"self",
".",
"fields",
"if",
"k",
"!=",
"'label'",
"]",
")",
")",
")",
"for",
"i",
",",
"d",
"in",
"enumerate",
"(",
"self",
")",
":",
"f",
".",
"write",
"(",
"'\\n{}'",
".",
"format",
"(",
"instance_to_conll",
"(",
"d",
")",
")",
")",
"if",
"i",
"!=",
"len",
"(",
"self",
")",
"-",
"1",
":",
"f",
".",
"write",
"(",
"'\\n'",
")"
] | Serializes the dataset in CONLL format to fname | [
"Serializes",
"the",
"dataset",
"in",
"CONLL",
"format",
"to",
"fname"
] | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/text/dataset.py#L122-L138 | train |
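A minimal sketch of ``write_conll``, assuming the ``Dataset`` constructor accepts an OrderedDict mapping field names to column lists (as the ``convert`` record below suggests). Note the method opens the file in ``'wb'`` but writes ``str``, so it targets Python 2:

    from collections import OrderedDict
    from stanza.text.dataset import Dataset  # module path from this record

    d = Dataset(OrderedDict([
        ('label', ['pos', 'neg']),
        ('word', [['good', 'movie'], ['bad', 'film']]),
    ]))
    d.write_conll('example.conll')
    # example.conll now contains:
    # # word
    # pos
    # good
    # movie
    #
    # neg
    # bad
    # film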
stanfordnlp/stanza | stanza/text/dataset.py | Dataset.convert | def convert(self, converters, in_place=False):
"""
Applies transformations to the dataset.
:param converters: A dictionary specifying the function to apply to each field. If a field is missing from the dictionary, then it will not be transformed.
:param in_place: Whether to perform the transformation in place or create a new dataset instance
:return: the transformed dataset instance
"""
dataset = self if in_place else self.__class__(OrderedDict([(name, data[:]) for name, data in self.fields.items()]))
for name, convert in converters.items():
if name not in self.fields.keys():
raise InvalidFieldsException('Converter specified for non-existent field {}'.format(name))
for i, d in enumerate(dataset.fields[name]):
dataset.fields[name][i] = convert(d)
return dataset | python | def convert(self, converters, in_place=False):
"""
Applies transformations to the dataset.
:param converters: A dictionary specifying the function to apply to each field. If a field is missing from the dictionary, then it will not be transformed.
:param in_place: Whether to perform the transformation in place or create a new dataset instance
:return: the transformed dataset instance
"""
dataset = self if in_place else self.__class__(OrderedDict([(name, data[:]) for name, data in self.fields.items()]))
for name, convert in converters.items():
if name not in self.fields.keys():
raise InvalidFieldsException('Converter specified for non-existent field {}'.format(name))
for i, d in enumerate(dataset.fields[name]):
dataset.fields[name][i] = convert(d)
return dataset | [
"def",
"convert",
"(",
"self",
",",
"converters",
",",
"in_place",
"=",
"False",
")",
":",
"dataset",
"=",
"self",
"if",
"in_place",
"else",
"self",
".",
"__class__",
"(",
"OrderedDict",
"(",
"[",
"(",
"name",
",",
"data",
"[",
":",
"]",
")",
"for",
"name",
",",
"data",
"in",
"self",
".",
"fields",
".",
"items",
"(",
")",
"]",
")",
")",
"for",
"name",
",",
"convert",
"in",
"converters",
".",
"items",
"(",
")",
":",
"if",
"name",
"not",
"in",
"self",
".",
"fields",
".",
"keys",
"(",
")",
":",
"raise",
"InvalidFieldsException",
"(",
"'Converter specified for non-existent field {}'",
".",
"format",
"(",
"name",
")",
")",
"for",
"i",
",",
"d",
"in",
"enumerate",
"(",
"dataset",
".",
"fields",
"[",
"name",
"]",
")",
":",
"dataset",
".",
"fields",
"[",
"name",
"]",
"[",
"i",
"]",
"=",
"convert",
"(",
"d",
")",
"return",
"dataset"
] | Applies transformations to the dataset.
:param converters: A dictionary specifying the function to apply to each field. If a field is missing from the dictionary, then it will not be transformed.
:param in_place: Whether to perform the transformation in place or create a new dataset instance
:return: the transformed dataset instance | [
"Applies",
"transformations",
"to",
"the",
"dataset",
"."
] | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/text/dataset.py#L140-L156 | train |
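Continuing the ``Dataset`` sketch above, ``convert`` applies a per-field function and, by default, returns a new instance rather than mutating the original:

    d2 = d.convert({'word': lambda words: [w.upper() for w in words]})
    d2.fields['word']  # [['GOOD', 'MOVIE'], ['BAD', 'FILM']]
    d.fields['word']   # unchanged, since in_place defaults to False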
stanfordnlp/stanza | stanza/text/dataset.py | Dataset.shuffle | def shuffle(self):
"""
Re-indexes the dataset in random order
:return: the shuffled dataset instance
"""
order = range(len(self))
random.shuffle(order)
for name, data in self.fields.items():
reindexed = []
for _, i in enumerate(order):
reindexed.append(data[i])
self.fields[name] = reindexed
return self | python | def shuffle(self):
"""
Re-indexes the dataset in random order
:return: the shuffled dataset instance
"""
order = range(len(self))
random.shuffle(order)
for name, data in self.fields.items():
reindexed = []
for _, i in enumerate(order):
reindexed.append(data[i])
self.fields[name] = reindexed
return self | [
"def",
"shuffle",
"(",
"self",
")",
":",
"order",
"=",
"range",
"(",
"len",
"(",
"self",
")",
")",
"random",
".",
"shuffle",
"(",
"order",
")",
"for",
"name",
",",
"data",
"in",
"self",
".",
"fields",
".",
"items",
"(",
")",
":",
"reindexed",
"=",
"[",
"]",
"for",
"_",
",",
"i",
"in",
"enumerate",
"(",
"order",
")",
":",
"reindexed",
".",
"append",
"(",
"data",
"[",
"i",
"]",
")",
"self",
".",
"fields",
"[",
"name",
"]",
"=",
"reindexed",
"return",
"self"
] | Re-indexes the dataset in random order
:return: the shuffled dataset instance | [
"Re",
"-",
"indexes",
"the",
"dataset",
"in",
"random",
"order"
] | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/text/dataset.py#L158-L171 | train |
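Usage is a single call, but note a Python 2 assumption in the implementation: ``range`` returns a list there, which ``random.shuffle`` can mutate in place; a Python 3 port would need ``list(range(len(self)))``:

    d.shuffle()  # one shared random permutation is applied to every field,
                 # keeping the columns aligned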
stanfordnlp/stanza | stanza/text/dataset.py | Dataset.pad | def pad(cls, sequences, padding, pad_len=None):
"""
Pads a list of sequences such that they form a matrix.
:param sequences: a list of sequences of varying lengths.
:param padding: the value of padded cells.
:param pad_len: the length of the maximum padded sequence.
"""
max_len = max([len(s) for s in sequences])
pad_len = pad_len or max_len
assert pad_len >= max_len, 'pad_len {} must be greater or equal to the longest sequence {}'.format(pad_len, max_len)
for i, s in enumerate(sequences):
sequences[i] = [padding] * (pad_len - len(s)) + s
return np.array(sequences) | python | def pad(cls, sequences, padding, pad_len=None):
"""
Pads a list of sequences such that they form a matrix.
:param sequences: a list of sequences of varying lengths.
:param padding: the value of padded cells.
:param pad_len: the length of the maximum padded sequence.
"""
max_len = max([len(s) for s in sequences])
pad_len = pad_len or max_len
assert pad_len >= max_len, 'pad_len {} must be greater or equal to the longest sequence {}'.format(pad_len, max_len)
for i, s in enumerate(sequences):
sequences[i] = [padding] * (pad_len - len(s)) + s
return np.array(sequences) | [
"def",
"pad",
"(",
"cls",
",",
"sequences",
",",
"padding",
",",
"pad_len",
"=",
"None",
")",
":",
"max_len",
"=",
"max",
"(",
"[",
"len",
"(",
"s",
")",
"for",
"s",
"in",
"sequences",
"]",
")",
"pad_len",
"=",
"pad_len",
"or",
"max_len",
"assert",
"pad_len",
">=",
"max_len",
",",
"'pad_len {} must be greater or equal to the longest sequence {}'",
".",
"format",
"(",
"pad_len",
",",
"max_len",
")",
"for",
"i",
",",
"s",
"in",
"enumerate",
"(",
"sequences",
")",
":",
"sequences",
"[",
"i",
"]",
"=",
"[",
"padding",
"]",
"*",
"(",
"pad_len",
"-",
"len",
"(",
"s",
")",
")",
"+",
"s",
"return",
"np",
".",
"array",
"(",
"sequences",
")"
] | Pads a list of sequences such that they form a matrix.
:param sequences: a list of sequences of varying lengths.
:param padding: the value of padded cells.
:param pad_len: the length of the maximum padded sequence. | [
"Pads",
"a",
"list",
"of",
"sequences",
"such",
"that",
"they",
"form",
"a",
"matrix",
"."
] | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/text/dataset.py#L208-L221 | train |
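A worked call, assuming ``pad`` is exposed as a classmethod (its first parameter is ``cls``). Padding is prepended, i.e. sequences are left-padded, and the input lists are mutated in place:

    from stanza.text.dataset import Dataset

    Dataset.pad([[1, 2], [3]], padding=0, pad_len=3)
    # array([[0, 1, 2],
    #        [0, 0, 3]])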
stanfordnlp/stanza | stanza/research/metrics.py | bleu | def bleu(eval_data, predictions, scores='ignored', learner='ignored'):
'''
Return corpus-level BLEU score of `predictions` using the `output`
field of the instances in `eval_data` as references. This is returned
as a length-1 list of floats.
This uses the NLTK unsmoothed implementation, which has been known
to have some bugs. This function patches over the biggest bug, which
is that NLTK ignores n-gram overlap counts of 0 (this should result
in a zero BLEU score).
>>> data = [Instance('input', 'this is the good'),
... Instance('input', 'the bad'),
... Instance('input', 'and the ugly')]
>>> bleu(data, ['this is the good', 'the good', 'seriously really good']) # doctest: +ELLIPSIS
[0.65599...]
>>> np.exp(np.mean([np.log(5. / 9.), np.log(3. / 6.),
... np.log(2. / 3.), np.log(1. / 1.)])) # doctest: +ELLIPSIS
0.65599...
'''
ref_groups = ([inst.output.split()]
if isinstance(inst.output, basestring) else
[_maybe_tokenize(r) for r in inst.output]
for inst in eval_data)
return [corpus_bleu(ref_groups, [p.split() for p in predictions])] | python | def bleu(eval_data, predictions, scores='ignored', learner='ignored'):
'''
Return corpus-level BLEU score of `predictions` using the `output`
field of the instances in `eval_data` as references. This is returned
as a length-1 list of floats.
This uses the NLTK unsmoothed implementation, which has been known
to have some bugs. This function patches over the biggest bug, which
is that NLTK ignores n-gram overlap counts of 0 (this should result
in a zero BLEU score).
>>> data = [Instance('input', 'this is the good'),
... Instance('input', 'the bad'),
... Instance('input', 'and the ugly')]
>>> bleu(data, ['this is the good', 'the good', 'seriously really good']) # doctest: +ELLIPSIS
[0.65599...]
>>> np.exp(np.mean([np.log(5. / 9.), np.log(3. / 6.),
... np.log(2. / 3.), np.log(1. / 1.)])) # doctest: +ELLIPSIS
0.65599...
'''
ref_groups = ([inst.output.split()]
if isinstance(inst.output, basestring) else
[_maybe_tokenize(r) for r in inst.output]
for inst in eval_data)
return [corpus_bleu(ref_groups, [p.split() for p in predictions])] | [
"def",
"bleu",
"(",
"eval_data",
",",
"predictions",
",",
"scores",
"=",
"'ignored'",
",",
"learner",
"=",
"'ignored'",
")",
":",
"ref_groups",
"=",
"(",
"[",
"inst",
".",
"output",
".",
"split",
"(",
")",
"]",
"if",
"isinstance",
"(",
"inst",
".",
"output",
",",
"basestring",
")",
"else",
"[",
"_maybe_tokenize",
"(",
"r",
")",
"for",
"r",
"in",
"inst",
".",
"output",
"]",
"for",
"inst",
"in",
"eval_data",
")",
"return",
"[",
"corpus_bleu",
"(",
"ref_groups",
",",
"[",
"p",
".",
"split",
"(",
")",
"for",
"p",
"in",
"predictions",
"]",
")",
"]"
] | Return corpus-level BLEU score of `predictions` using the `output`
field of the instances in `eval_data` as references. This is returned
as a length-1 list of floats.
This uses the NLTK unsmoothed implementation, which has been known
to have some bugs. This function patches over the biggest bug, which
is that NLTK ignores n-gram overlap counts of 0 (this should result
in a zero BLEU score).
>>> data = [Instance('input', 'this is the good'),
... Instance('input', 'the bad'),
... Instance('input', 'and the ugly')]
>>> bleu(data, ['this is the good', 'the good', 'seriously really good']) # doctest: +ELLIPSIS
[0.65599...]
>>> np.exp(np.mean([np.log(5. / 9.), np.log(3. / 6.),
... np.log(2. / 3.), np.log(1. / 1.)])) # doctest: +ELLIPSIS
0.65599... | [
"Return",
"corpus",
"-",
"level",
"BLEU",
"score",
"of",
"predictions",
"using",
"the",
"output",
"field",
"of",
"the",
"instances",
"in",
"eval_data",
"as",
"references",
".",
"This",
"is",
"returned",
"as",
"a",
"length",
"-",
"1",
"list",
"of",
"floats",
"."
] | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/research/metrics.py#L70-L94 | train |
stanfordnlp/stanza | stanza/research/metrics.py | squared_error | def squared_error(eval_data, predictions, scores='ignored', learner='ignored'):
'''
Return the squared error of each prediction in `predictions` with respect
to the correct output in `eval_data`.
>>> data = [Instance('input', (0., 0., 1.)),
... Instance('input', (0., 1., 1.)),
... Instance('input', (1., 0., 0.))]
>>> squared_error(data, [(0., 1., 1.), (0., 1., 1.), (-1., 1., 0.)])
[1.0, 0.0, 5.0]
'''
return [np.sum((np.array(pred) - np.array(inst.output)) ** 2)
for inst, pred in zip(eval_data, predictions)] | python | def squared_error(eval_data, predictions, scores='ignored', learner='ignored'):
'''
Return the squared error of each prediction in `predictions` with respect
to the correct output in `eval_data`.
>>> data = [Instance('input', (0., 0., 1.)),
... Instance('input', (0., 1., 1.)),
... Instance('input', (1., 0., 0.))]
>>> squared_error(data, [(0., 1., 1.), (0., 1., 1.), (-1., 1., 0.)])
[1.0, 0.0, 5.0]
'''
return [np.sum((np.array(pred) - np.array(inst.output)) ** 2)
for inst, pred in zip(eval_data, predictions)] | [
"def",
"squared_error",
"(",
"eval_data",
",",
"predictions",
",",
"scores",
"=",
"'ignored'",
",",
"learner",
"=",
"'ignored'",
")",
":",
"return",
"[",
"np",
".",
"sum",
"(",
"(",
"np",
".",
"array",
"(",
"pred",
")",
"-",
"np",
".",
"array",
"(",
"inst",
".",
"output",
")",
")",
"**",
"2",
")",
"for",
"inst",
",",
"pred",
"in",
"zip",
"(",
"eval_data",
",",
"predictions",
")",
"]"
] | Return the squared error of each prediction in `predictions` with respect
to the correct output in `eval_data`.
>>> data = [Instance('input', (0., 0., 1.)),
... Instance('input', (0., 1., 1.)),
... Instance('input', (1., 0., 0.))]
>>> squared_error(data, [(0., 1., 1.), (0., 1., 1.), (-1., 1., 0.)])
[1.0, 0.0, 5.0] | [
"Return",
"the",
"squared",
"error",
"of",
"each",
"prediction",
"in",
"predictions",
"with",
"respect",
"to",
"the",
"correct",
"output",
"in",
"eval_data",
"."
] | 920c55d8eaa1e7105971059c66eb448a74c100d6 | https://github.com/stanfordnlp/stanza/blob/920c55d8eaa1e7105971059c66eb448a74c100d6/stanza/research/metrics.py#L122-L134 | train |
drdoctr/doctr | doctr/local.py | encrypt_variable | def encrypt_variable(variable, build_repo, *, tld='.org', public_key=None,
travis_token=None, **login_kwargs):
"""
Encrypt an environment variable for ``build_repo`` for Travis
``variable`` should be a bytes object, of the form ``b'ENV=value'``.
``build_repo`` is the repo that ``doctr deploy`` will be run from. It
should be like 'drdoctr/doctr'.
``tld`` should be ``'.org'`` for travis-ci.org and ``'.com'`` for
travis-ci.com.
``public_key`` should be a pem format public key, obtained from Travis if
not provided.
If the repo is private, travis_token should be as returned by
``get_temporary_token(**login_kwargs)``. A token being present
automatically implies ``tld='.com'``.
"""
if not isinstance(variable, bytes):
raise TypeError("variable should be bytes")
if not b"=" in variable:
raise ValueError("variable should be of the form 'VARIABLE=value'")
if not public_key:
_headers = {
'Content-Type': 'application/json',
'User-Agent': 'MyClient/1.0.0',
}
headersv2 = {**_headers, **Travis_APIv2}
headersv3 = {**_headers, **Travis_APIv3}
if travis_token:
headersv3['Authorization'] = 'token {}'.format(travis_token)
res = requests.get('https://api.travis-ci.com/repo/{build_repo}/key_pair/generated'.format(build_repo=urllib.parse.quote(build_repo,
safe='')), headers=headersv3)
if res.json().get('file') == 'not found':
raise RuntimeError("Could not find the Travis public key for %s" % build_repo)
public_key = res.json()['public_key']
else:
res = requests.get('https://api.travis-ci{tld}/repos/{build_repo}/key'.format(build_repo=build_repo,
tld=tld),
headers=headersv2)
public_key = res.json()['key']
if res.status_code == requests.codes.not_found:
raise RuntimeError('Could not find requested repo on Travis. Is Travis enabled?')
res.raise_for_status()
public_key = public_key.replace("RSA PUBLIC KEY", "PUBLIC KEY").encode('utf-8')
key = serialization.load_pem_public_key(public_key, backend=default_backend())
pad = padding.PKCS1v15()
return base64.b64encode(key.encrypt(variable, pad)) | python | def encrypt_variable(variable, build_repo, *, tld='.org', public_key=None,
travis_token=None, **login_kwargs):
"""
Encrypt an environment variable for ``build_repo`` for Travis
``variable`` should be a bytes object, of the form ``b'ENV=value'``.
``build_repo`` is the repo that ``doctr deploy`` will be run from. It
should be like 'drdoctr/doctr'.
``tld`` should be ``'.org'`` for travis-ci.org and ``'.com'`` for
travis-ci.com.
``public_key`` should be a pem format public key, obtained from Travis if
not provided.
If the repo is private, travis_token should be as returned by
``get_temporary_token(**login_kwargs)``. A token being present
automatically implies ``tld='.com'``.
"""
if not isinstance(variable, bytes):
raise TypeError("variable should be bytes")
if not b"=" in variable:
raise ValueError("variable should be of the form 'VARIABLE=value'")
if not public_key:
_headers = {
'Content-Type': 'application/json',
'User-Agent': 'MyClient/1.0.0',
}
headersv2 = {**_headers, **Travis_APIv2}
headersv3 = {**_headers, **Travis_APIv3}
if travis_token:
headersv3['Authorization'] = 'token {}'.format(travis_token)
res = requests.get('https://api.travis-ci.com/repo/{build_repo}/key_pair/generated'.format(build_repo=urllib.parse.quote(build_repo,
safe='')), headers=headersv3)
if res.json().get('file') == 'not found':
raise RuntimeError("Could not find the Travis public key for %s" % build_repo)
public_key = res.json()['public_key']
else:
res = requests.get('https://api.travis-ci{tld}/repos/{build_repo}/key'.format(build_repo=build_repo,
tld=tld),
headers=headersv2)
public_key = res.json()['key']
if res.status_code == requests.codes.not_found:
raise RuntimeError('Could not find requested repo on Travis. Is Travis enabled?')
res.raise_for_status()
public_key = public_key.replace("RSA PUBLIC KEY", "PUBLIC KEY").encode('utf-8')
key = serialization.load_pem_public_key(public_key, backend=default_backend())
pad = padding.PKCS1v15()
return base64.b64encode(key.encrypt(variable, pad)) | [
"def",
"encrypt_variable",
"(",
"variable",
",",
"build_repo",
",",
"*",
",",
"tld",
"=",
"'.org'",
",",
"public_key",
"=",
"None",
",",
"travis_token",
"=",
"None",
",",
"*",
"*",
"login_kwargs",
")",
":",
"if",
"not",
"isinstance",
"(",
"variable",
",",
"bytes",
")",
":",
"raise",
"TypeError",
"(",
"\"variable should be bytes\"",
")",
"if",
"not",
"b\"=\"",
"in",
"variable",
":",
"raise",
"ValueError",
"(",
"\"variable should be of the form 'VARIABLE=value'\"",
")",
"if",
"not",
"public_key",
":",
"_headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
",",
"'User-Agent'",
":",
"'MyClient/1.0.0'",
",",
"}",
"headersv2",
"=",
"{",
"*",
"*",
"_headers",
",",
"*",
"*",
"Travis_APIv2",
"}",
"headersv3",
"=",
"{",
"*",
"*",
"_headers",
",",
"*",
"*",
"Travis_APIv3",
"}",
"if",
"travis_token",
":",
"headersv3",
"[",
"'Authorization'",
"]",
"=",
"'token {}'",
".",
"format",
"(",
"travis_token",
")",
"res",
"=",
"requests",
".",
"get",
"(",
"'https://api.travis-ci.com/repo/{build_repo}/key_pair/generated'",
".",
"format",
"(",
"build_repo",
"=",
"urllib",
".",
"parse",
".",
"quote",
"(",
"build_repo",
",",
"safe",
"=",
"''",
")",
")",
",",
"headers",
"=",
"headersv3",
")",
"if",
"res",
".",
"json",
"(",
")",
".",
"get",
"(",
"'file'",
")",
"==",
"'not found'",
":",
"raise",
"RuntimeError",
"(",
"\"Could not find the Travis public key for %s\"",
"%",
"build_repo",
")",
"public_key",
"=",
"res",
".",
"json",
"(",
")",
"[",
"'public_key'",
"]",
"else",
":",
"res",
"=",
"requests",
".",
"get",
"(",
"'https://api.travis-ci{tld}/repos/{build_repo}/key'",
".",
"format",
"(",
"build_repo",
"=",
"build_repo",
",",
"tld",
"=",
"tld",
")",
",",
"headers",
"=",
"headersv2",
")",
"public_key",
"=",
"res",
".",
"json",
"(",
")",
"[",
"'key'",
"]",
"if",
"res",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"not_found",
":",
"raise",
"RuntimeError",
"(",
"'Could not find requested repo on Travis. Is Travis enabled?'",
")",
"res",
".",
"raise_for_status",
"(",
")",
"public_key",
"=",
"public_key",
".",
"replace",
"(",
"\"RSA PUBLIC KEY\"",
",",
"\"PUBLIC KEY\"",
")",
".",
"encode",
"(",
"'utf-8'",
")",
"key",
"=",
"serialization",
".",
"load_pem_public_key",
"(",
"public_key",
",",
"backend",
"=",
"default_backend",
"(",
")",
")",
"pad",
"=",
"padding",
".",
"PKCS1v15",
"(",
")",
"return",
"base64",
".",
"b64encode",
"(",
"key",
".",
"encrypt",
"(",
"variable",
",",
"pad",
")",
")"
] | Encrypt an environment variable for ``build_repo`` for Travis
``variable`` should be a bytes object, of the form ``b'ENV=value'``.
``build_repo`` is the repo that ``doctr deploy`` will be run from. It
should be like 'drdoctr/doctr'.
``tld`` should be ``'.org'`` for travis-ci.org and ``'.com'`` for
travis-ci.com.
``public_key`` should be a pem format public key, obtained from Travis if
not provided.
If the repo is private, travis_token should be as returned by
``get_temporary_token(**login_kwargs)``. A token being present
automatically implies ``tld='.com'``. | [
"Encrypt",
"an",
"environment",
"variable",
"for",
"build_repo",
"for",
"Travis"
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/local.py#L28-L84 | train |
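A sketch that exercises ``encrypt_variable`` without any network traffic by supplying ``public_key`` directly; the function expects a PEM string (it calls ``.replace`` and ``.encode`` on it). The stand-in key pair is generated with the ``cryptography`` package the module already depends on, and ``'user/repo'`` is a placeholder:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric import rsa

    from doctr.local import encrypt_variable

    private = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                                       backend=default_backend())
    pem = private.public_key().public_bytes(
        serialization.Encoding.PEM,
        serialization.PublicFormat.SubjectPublicKeyInfo).decode('utf-8')

    secure = encrypt_variable(b'GH_TOKEN=abc123', build_repo='user/repo',
                              public_key=pem)
    # `secure` is base64-encoded RSA ciphertext suitable for a .travis.yml secure entry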
drdoctr/doctr | doctr/local.py | encrypt_to_file | def encrypt_to_file(contents, filename):
"""
Encrypts ``contents`` and writes it to ``filename``.
``contents`` should be a bytes string. ``filename`` should end with
``.enc``.
Returns the secret key used for the encryption.
Decrypt the file with :func:`doctr.travis.decrypt_file`.
"""
if not filename.endswith('.enc'):
raise ValueError("%s does not end with .enc" % filename)
key = Fernet.generate_key()
fer = Fernet(key)
encrypted_file = fer.encrypt(contents)
with open(filename, 'wb') as f:
f.write(encrypted_file)
return key | python | def encrypt_to_file(contents, filename):
"""
Encrypts ``contents`` and writes it to ``filename``.
``contents`` should be a bytes string. ``filename`` should end with
``.enc``.
Returns the secret key used for the encryption.
Decrypt the file with :func:`doctr.travis.decrypt_file`.
"""
if not filename.endswith('.enc'):
raise ValueError("%s does not end with .enc" % filename)
key = Fernet.generate_key()
fer = Fernet(key)
encrypted_file = fer.encrypt(contents)
with open(filename, 'wb') as f:
f.write(encrypted_file)
return key | [
"def",
"encrypt_to_file",
"(",
"contents",
",",
"filename",
")",
":",
"if",
"not",
"filename",
".",
"endswith",
"(",
"'.enc'",
")",
":",
"raise",
"ValueError",
"(",
"\"%s does not end with .enc\"",
"%",
"filename",
")",
"key",
"=",
"Fernet",
".",
"generate_key",
"(",
")",
"fer",
"=",
"Fernet",
"(",
"key",
")",
"encrypted_file",
"=",
"fer",
".",
"encrypt",
"(",
"contents",
")",
"with",
"open",
"(",
"filename",
",",
"'wb'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"encrypted_file",
")",
"return",
"key"
] | Encrypts ``contents`` and writes it to ``filename``.
``contents`` should be a bytes string. ``filename`` should end with
``.enc``.
Returns the secret key used for the encryption.
Decrypt the file with :func:`doctr.travis.decrypt_file`. | [
"Encrypts",
"contents",
"and",
"writes",
"it",
"to",
"filename",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/local.py#L86-L109 | train |
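A round-trip sketch for ``encrypt_to_file``, decrypting by hand with the returned Fernet key:

    from cryptography.fernet import Fernet

    from doctr.local import encrypt_to_file

    key = encrypt_to_file(b'super secret', 'payload.enc')
    with open('payload.enc', 'rb') as f:
        assert Fernet(key).decrypt(f.read()) == b'super secret'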
drdoctr/doctr | doctr/local.py | GitHub_login | def GitHub_login(*, username=None, password=None, OTP=None, headers=None):
"""
Login to GitHub.
If no username, password, or OTP (2-factor authentication code) are
provided, they will be requested from the command line.
Returns a dict of kwargs that can be passed to functions that require
authenticated connections to GitHub.
"""
if not username:
username = input("What is your GitHub username? ")
if not password:
password = getpass("Enter the GitHub password for {username}: ".format(username=username))
headers = headers or {}
if OTP:
headers['X-GitHub-OTP'] = OTP
auth = HTTPBasicAuth(username, password)
r = requests.get('https://api.github.com/', auth=auth, headers=headers)
if r.status_code == 401:
two_factor = r.headers.get('X-GitHub-OTP')
if two_factor:
if OTP:
print(red("Invalid authentication code"))
# For SMS, we have to make a fake request (that will fail without
# the OTP) to get GitHub to send it. See https://github.com/drdoctr/doctr/pull/203
auth_header = base64.urlsafe_b64encode(bytes(username + ':' + password, 'utf8')).decode()
login_kwargs = {'auth': None, 'headers': {'Authorization': 'Basic {}'.format(auth_header)}}
try:
generate_GitHub_token(**login_kwargs)
except (requests.exceptions.HTTPError, GitHubError):
pass
print("A two-factor authentication code is required:", two_factor.split(';')[1].strip())
OTP = input("Authentication code: ")
return GitHub_login(username=username, password=password, OTP=OTP, headers=headers)
raise AuthenticationFailed("invalid username or password")
GitHub_raise_for_status(r)
return {'auth': auth, 'headers': headers} | python | def GitHub_login(*, username=None, password=None, OTP=None, headers=None):
"""
Login to GitHub.
If no username, password, or OTP (2-factor authentication code) are
provided, they will be requested from the command line.
Returns a dict of kwargs that can be passed to functions that require
authenticated connections to GitHub.
"""
if not username:
username = input("What is your GitHub username? ")
if not password:
password = getpass("Enter the GitHub password for {username}: ".format(username=username))
headers = headers or {}
if OTP:
headers['X-GitHub-OTP'] = OTP
auth = HTTPBasicAuth(username, password)
r = requests.get('https://api.github.com/', auth=auth, headers=headers)
if r.status_code == 401:
two_factor = r.headers.get('X-GitHub-OTP')
if two_factor:
if OTP:
print(red("Invalid authentication code"))
# For SMS, we have to make a fake request (that will fail without
# the OTP) to get GitHub to send it. See https://github.com/drdoctr/doctr/pull/203
auth_header = base64.urlsafe_b64encode(bytes(username + ':' + password, 'utf8')).decode()
login_kwargs = {'auth': None, 'headers': {'Authorization': 'Basic {}'.format(auth_header)}}
try:
generate_GitHub_token(**login_kwargs)
except (requests.exceptions.HTTPError, GitHubError):
pass
print("A two-factor authentication code is required:", two_factor.split(';')[1].strip())
OTP = input("Authentication code: ")
return GitHub_login(username=username, password=password, OTP=OTP, headers=headers)
raise AuthenticationFailed("invalid username or password")
GitHub_raise_for_status(r)
return {'auth': auth, 'headers': headers} | [
"def",
"GitHub_login",
"(",
"*",
",",
"username",
"=",
"None",
",",
"password",
"=",
"None",
",",
"OTP",
"=",
"None",
",",
"headers",
"=",
"None",
")",
":",
"if",
"not",
"username",
":",
"username",
"=",
"input",
"(",
"\"What is your GitHub username? \"",
")",
"if",
"not",
"password",
":",
"password",
"=",
"getpass",
"(",
"\"Enter the GitHub password for {username}: \"",
".",
"format",
"(",
"username",
"=",
"username",
")",
")",
"headers",
"=",
"headers",
"or",
"{",
"}",
"if",
"OTP",
":",
"headers",
"[",
"'X-GitHub-OTP'",
"]",
"=",
"OTP",
"auth",
"=",
"HTTPBasicAuth",
"(",
"username",
",",
"password",
")",
"r",
"=",
"requests",
".",
"get",
"(",
"'https://api.github.com/'",
",",
"auth",
"=",
"auth",
",",
"headers",
"=",
"headers",
")",
"if",
"r",
".",
"status_code",
"==",
"401",
":",
"two_factor",
"=",
"r",
".",
"headers",
".",
"get",
"(",
"'X-GitHub-OTP'",
")",
"if",
"two_factor",
":",
"if",
"OTP",
":",
"print",
"(",
"red",
"(",
"\"Invalid authentication code\"",
")",
")",
"# For SMS, we have to make a fake request (that will fail without",
"# the OTP) to get GitHub to send it. See https://github.com/drdoctr/doctr/pull/203",
"auth_header",
"=",
"base64",
".",
"urlsafe_b64encode",
"(",
"bytes",
"(",
"username",
"+",
"':'",
"+",
"password",
",",
"'utf8'",
")",
")",
".",
"decode",
"(",
")",
"login_kwargs",
"=",
"{",
"'auth'",
":",
"None",
",",
"'headers'",
":",
"{",
"'Authorization'",
":",
"'Basic {}'",
".",
"format",
"(",
"auth_header",
")",
"}",
"}",
"try",
":",
"generate_GitHub_token",
"(",
"*",
"*",
"login_kwargs",
")",
"except",
"(",
"requests",
".",
"exceptions",
".",
"HTTPError",
",",
"GitHubError",
")",
":",
"pass",
"print",
"(",
"\"A two-factor authentication code is required:\"",
",",
"two_factor",
".",
"split",
"(",
"';'",
")",
"[",
"1",
"]",
".",
"strip",
"(",
")",
")",
"OTP",
"=",
"input",
"(",
"\"Authentication code: \"",
")",
"return",
"GitHub_login",
"(",
"username",
"=",
"username",
",",
"password",
"=",
"password",
",",
"OTP",
"=",
"OTP",
",",
"headers",
"=",
"headers",
")",
"raise",
"AuthenticationFailed",
"(",
"\"invalid username or password\"",
")",
"GitHub_raise_for_status",
"(",
"r",
")",
"return",
"{",
"'auth'",
":",
"auth",
",",
"'headers'",
":",
"headers",
"}"
] | Login to GitHub.
If no username, password, or OTP (2-factor authentication code) are
provided, they will be requested from the command line.
Returns a dict of kwargs that can be passed to functions that require
authenticated connections to GitHub. | [
"Login",
"to",
"GitHub",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/local.py#L114-L158 | train |
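A usage sketch; the call is interactive (it prompts on the command line), and since GitHub has retired username/password authentication for its API, the flow is historical:

    from doctr.local import GitHub_login, generate_GitHub_token

    login_kwargs = GitHub_login()  # prompts for username, password, and 2FA code
    token_info = generate_GitHub_token(note='example token (delete me)',
                                       **login_kwargs)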
drdoctr/doctr | doctr/local.py | GitHub_post | def GitHub_post(data, url, *, auth, headers):
"""
POST the data ``data`` to GitHub.
Returns the json response from the server, or raises on error status.
"""
r = requests.post(url, auth=auth, headers=headers, data=json.dumps(data))
GitHub_raise_for_status(r)
return r.json() | python | def GitHub_post(data, url, *, auth, headers):
"""
POST the data ``data`` to GitHub.
Returns the json response from the server, or raises on error status.
"""
r = requests.post(url, auth=auth, headers=headers, data=json.dumps(data))
GitHub_raise_for_status(r)
return r.json() | [
"def",
"GitHub_post",
"(",
"data",
",",
"url",
",",
"*",
",",
"auth",
",",
"headers",
")",
":",
"r",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"auth",
"=",
"auth",
",",
"headers",
"=",
"headers",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"data",
")",
")",
"GitHub_raise_for_status",
"(",
"r",
")",
"return",
"r",
".",
"json",
"(",
")"
] | POST the data ``data`` to GitHub.
Returns the json response from the server, or raises on error status. | [
"POST",
"the",
"data",
"data",
"to",
"GitHub",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/local.py#L215-L224 | train |
drdoctr/doctr | doctr/local.py | get_travis_token | def get_travis_token(*, GitHub_token=None, **login_kwargs):
"""
Generate a temporary token for authenticating with Travis
The GitHub token can be passed in to the ``GitHub_token`` keyword
argument. If no token is passed in, a GitHub token is generated
temporarily, and then immediately deleted.
    This is needed to activate a private repo.
Returns the secret token. It should be added to the headers like
headers['Authorization'] = "token {}".format(token)
"""
_headers = {
'Content-Type': 'application/json',
'User-Agent': 'MyClient/1.0.0',
}
headersv2 = {**_headers, **Travis_APIv2}
token_id = None
try:
if not GitHub_token:
print(green("I need to generate a temporary token with GitHub to authenticate with Travis. You may get a warning email from GitHub about this."))
print(green("It will be deleted immediately. If you still see it after this at https://github.com/settings/tokens after please delete it manually."))
# /auth/github doesn't seem to exist in the Travis API v3.
tok_dict = generate_GitHub_token(scopes=["read:org", "user:email", "repo"],
note="temporary token for doctr to auth against travis (delete me)",
**login_kwargs)
GitHub_token = tok_dict['token']
token_id = tok_dict['id']
data = {'github_token': GitHub_token}
res = requests.post('https://api.travis-ci.com/auth/github', data=json.dumps(data), headers=headersv2)
return res.json()['access_token']
finally:
if token_id:
delete_GitHub_token(token_id, **login_kwargs) | python | def get_travis_token(*, GitHub_token=None, **login_kwargs):
"""
Generate a temporary token for authenticating with Travis
The GitHub token can be passed in to the ``GitHub_token`` keyword
argument. If no token is passed in, a GitHub token is generated
temporarily, and then immediately deleted.
    This is needed to activate a private repo.
Returns the secret token. It should be added to the headers like
headers['Authorization'] = "token {}".format(token)
"""
_headers = {
'Content-Type': 'application/json',
'User-Agent': 'MyClient/1.0.0',
}
headersv2 = {**_headers, **Travis_APIv2}
token_id = None
try:
if not GitHub_token:
print(green("I need to generate a temporary token with GitHub to authenticate with Travis. You may get a warning email from GitHub about this."))
print(green("It will be deleted immediately. If you still see it after this at https://github.com/settings/tokens after please delete it manually."))
# /auth/github doesn't seem to exist in the Travis API v3.
tok_dict = generate_GitHub_token(scopes=["read:org", "user:email", "repo"],
note="temporary token for doctr to auth against travis (delete me)",
**login_kwargs)
GitHub_token = tok_dict['token']
token_id = tok_dict['id']
data = {'github_token': GitHub_token}
res = requests.post('https://api.travis-ci.com/auth/github', data=json.dumps(data), headers=headersv2)
return res.json()['access_token']
finally:
if token_id:
delete_GitHub_token(token_id, **login_kwargs) | [
"def",
"get_travis_token",
"(",
"*",
",",
"GitHub_token",
"=",
"None",
",",
"*",
"*",
"login_kwargs",
")",
":",
"_headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
",",
"'User-Agent'",
":",
"'MyClient/1.0.0'",
",",
"}",
"headersv2",
"=",
"{",
"*",
"*",
"_headers",
",",
"*",
"*",
"Travis_APIv2",
"}",
"token_id",
"=",
"None",
"try",
":",
"if",
"not",
"GitHub_token",
":",
"print",
"(",
"green",
"(",
"\"I need to generate a temporary token with GitHub to authenticate with Travis. You may get a warning email from GitHub about this.\"",
")",
")",
"print",
"(",
"green",
"(",
"\"It will be deleted immediately. If you still see it after this at https://github.com/settings/tokens after please delete it manually.\"",
")",
")",
"# /auth/github doesn't seem to exist in the Travis API v3.",
"tok_dict",
"=",
"generate_GitHub_token",
"(",
"scopes",
"=",
"[",
"\"read:org\"",
",",
"\"user:email\"",
",",
"\"repo\"",
"]",
",",
"note",
"=",
"\"temporary token for doctr to auth against travis (delete me)\"",
",",
"*",
"*",
"login_kwargs",
")",
"GitHub_token",
"=",
"tok_dict",
"[",
"'token'",
"]",
"token_id",
"=",
"tok_dict",
"[",
"'id'",
"]",
"data",
"=",
"{",
"'github_token'",
":",
"GitHub_token",
"}",
"res",
"=",
"requests",
".",
"post",
"(",
"'https://api.travis-ci.com/auth/github'",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"data",
")",
",",
"headers",
"=",
"headersv2",
")",
"return",
"res",
".",
"json",
"(",
")",
"[",
"'access_token'",
"]",
"finally",
":",
"if",
"token_id",
":",
"delete_GitHub_token",
"(",
"token_id",
",",
"*",
"*",
"login_kwargs",
")"
] | Generate a temporary token for authenticating with Travis
The GitHub token can be passed in to the ``GitHub_token`` keyword
argument. If no token is passed in, a GitHub token is generated
temporarily, and then immediately deleted.
This is needed to activate a private repo.
Returns the secret token. It should be added to the headers like
headers['Authorization'] = "token {}".format(token) | [
"Generate",
"a",
"temporary",
"token",
"for",
"authenticating",
"with",
"Travis"
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/local.py#L227-L264 | train |
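A usage sketch, reusing the interactive login from the ``GitHub_login`` record above and attaching the result to request headers exactly as the docstring describes:

    from doctr.local import GitHub_login, get_travis_token

    login_kwargs = GitHub_login()
    travis_token = get_travis_token(**login_kwargs)
    headers = {'Authorization': 'token {}'.format(travis_token)}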
drdoctr/doctr | doctr/local.py | generate_GitHub_token | def generate_GitHub_token(*, note="Doctr token for pushing to gh-pages from Travis", scopes=None, **login_kwargs):
"""
Generate a GitHub token for pushing from Travis
The scope requested is public_repo.
If no password or OTP are provided, they will be requested from the
command line.
The token created here can be revoked at
https://github.com/settings/tokens.
"""
if scopes is None:
scopes = ['public_repo']
AUTH_URL = "https://api.github.com/authorizations"
data = {
"scopes": scopes,
"note": note,
"note_url": "https://github.com/drdoctr/doctr",
"fingerprint": str(uuid.uuid4()),
}
return GitHub_post(data, AUTH_URL, **login_kwargs) | python | def generate_GitHub_token(*, note="Doctr token for pushing to gh-pages from Travis", scopes=None, **login_kwargs):
"""
Generate a GitHub token for pushing from Travis
The scope requested is public_repo.
If no password or OTP are provided, they will be requested from the
command line.
The token created here can be revoked at
https://github.com/settings/tokens.
"""
if scopes is None:
scopes = ['public_repo']
AUTH_URL = "https://api.github.com/authorizations"
data = {
"scopes": scopes,
"note": note,
"note_url": "https://github.com/drdoctr/doctr",
"fingerprint": str(uuid.uuid4()),
}
return GitHub_post(data, AUTH_URL, **login_kwargs) | [
"def",
"generate_GitHub_token",
"(",
"*",
",",
"note",
"=",
"\"Doctr token for pushing to gh-pages from Travis\"",
",",
"scopes",
"=",
"None",
",",
"*",
"*",
"login_kwargs",
")",
":",
"if",
"scopes",
"is",
"None",
":",
"scopes",
"=",
"[",
"'public_repo'",
"]",
"AUTH_URL",
"=",
"\"https://api.github.com/authorizations\"",
"data",
"=",
"{",
"\"scopes\"",
":",
"scopes",
",",
"\"note\"",
":",
"note",
",",
"\"note_url\"",
":",
"\"https://github.com/drdoctr/doctr\"",
",",
"\"fingerprint\"",
":",
"str",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
",",
"}",
"return",
"GitHub_post",
"(",
"data",
",",
"AUTH_URL",
",",
"*",
"*",
"login_kwargs",
")"
] | Generate a GitHub token for pushing from Travis
The scope requested is public_repo.
If no password or OTP are provided, they will be requested from the
command line.
The token created here can be revoked at
https://github.com/settings/tokens. | [
"Generate",
"a",
"GitHub",
"token",
"for",
"pushing",
"from",
"Travis"
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/local.py#L267-L288 | train |
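Continuing the login sketch above; the returned dict carries at least ``'token'`` and ``'id'`` (both are read by ``get_travis_token``):

    tok = generate_GitHub_token(scopes=['public_repo'], **login_kwargs)
    tok['token']  # the secret value
    tok['id']     # pass to delete_GitHub_token to revoke it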
drdoctr/doctr | doctr/local.py | delete_GitHub_token | def delete_GitHub_token(token_id, *, auth, headers):
"""Delete a temporary GitHub token"""
r = requests.delete('https://api.github.com/authorizations/{id}'.format(id=token_id), auth=auth, headers=headers)
GitHub_raise_for_status(r) | python | def delete_GitHub_token(token_id, *, auth, headers):
"""Delete a temporary GitHub token"""
r = requests.delete('https://api.github.com/authorizations/{id}'.format(id=token_id), auth=auth, headers=headers)
GitHub_raise_for_status(r) | [
"def",
"delete_GitHub_token",
"(",
"token_id",
",",
"*",
",",
"auth",
",",
"headers",
")",
":",
"r",
"=",
"requests",
".",
"delete",
"(",
"'https://api.github.com/authorizations/{id}'",
".",
"format",
"(",
"id",
"=",
"token_id",
")",
",",
"auth",
"=",
"auth",
",",
"headers",
"=",
"headers",
")",
"GitHub_raise_for_status",
"(",
"r",
")"
] | Delete a temporary GitHub token | [
"Delete",
"a",
"temporary",
"GitHub",
"token"
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/local.py#L291-L294 | train |
drdoctr/doctr | doctr/local.py | upload_GitHub_deploy_key | def upload_GitHub_deploy_key(deploy_repo, ssh_key, *, read_only=False,
title="Doctr deploy key for pushing to gh-pages from Travis", **login_kwargs):
"""
Uploads a GitHub deploy key to ``deploy_repo``.
If ``read_only=True``, the deploy_key will not be able to write to the
repo.
"""
DEPLOY_KEY_URL = "https://api.github.com/repos/{deploy_repo}/keys".format(deploy_repo=deploy_repo)
data = {
"title": title,
"key": ssh_key,
"read_only": read_only,
}
return GitHub_post(data, DEPLOY_KEY_URL, **login_kwargs) | python | def upload_GitHub_deploy_key(deploy_repo, ssh_key, *, read_only=False,
title="Doctr deploy key for pushing to gh-pages from Travis", **login_kwargs):
"""
Uploads a GitHub deploy key to ``deploy_repo``.
If ``read_only=True``, the deploy_key will not be able to write to the
repo.
"""
DEPLOY_KEY_URL = "https://api.github.com/repos/{deploy_repo}/keys".format(deploy_repo=deploy_repo)
data = {
"title": title,
"key": ssh_key,
"read_only": read_only,
}
return GitHub_post(data, DEPLOY_KEY_URL, **login_kwargs) | [
"def",
"upload_GitHub_deploy_key",
"(",
"deploy_repo",
",",
"ssh_key",
",",
"*",
",",
"read_only",
"=",
"False",
",",
"title",
"=",
"\"Doctr deploy key for pushing to gh-pages from Travis\"",
",",
"*",
"*",
"login_kwargs",
")",
":",
"DEPLOY_KEY_URL",
"=",
"\"https://api.github.com/repos/{deploy_repo}/keys\"",
".",
"format",
"(",
"deploy_repo",
"=",
"deploy_repo",
")",
"data",
"=",
"{",
"\"title\"",
":",
"title",
",",
"\"key\"",
":",
"ssh_key",
",",
"\"read_only\"",
":",
"read_only",
",",
"}",
"return",
"GitHub_post",
"(",
"data",
",",
"DEPLOY_KEY_URL",
",",
"*",
"*",
"login_kwargs",
")"
] | Uploads a GitHub deploy key to ``deploy_repo``.
If ``read_only=True``, the deploy_key will not be able to write to the
repo. | [
"Uploads",
"a",
"GitHub",
"deploy",
"key",
"to",
"deploy_repo",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/local.py#L297-L312 | train |
drdoctr/doctr | doctr/local.py | generate_ssh_key | def generate_ssh_key():
"""
Generates an SSH deploy public and private key.
Returns (private key, public key), a tuple of byte strings.
"""
key = rsa.generate_private_key(
backend=default_backend(),
public_exponent=65537,
key_size=4096
)
private_key = key.private_bytes(
serialization.Encoding.PEM,
serialization.PrivateFormat.PKCS8,
serialization.NoEncryption())
public_key = key.public_key().public_bytes(
serialization.Encoding.OpenSSH,
serialization.PublicFormat.OpenSSH
)
return private_key, public_key | python | def generate_ssh_key():
"""
Generates an SSH deploy public and private key.
Returns (private key, public key), a tuple of byte strings.
"""
key = rsa.generate_private_key(
backend=default_backend(),
public_exponent=65537,
key_size=4096
)
private_key = key.private_bytes(
serialization.Encoding.PEM,
serialization.PrivateFormat.PKCS8,
serialization.NoEncryption())
public_key = key.public_key().public_bytes(
serialization.Encoding.OpenSSH,
serialization.PublicFormat.OpenSSH
)
return private_key, public_key | [
"def",
"generate_ssh_key",
"(",
")",
":",
"key",
"=",
"rsa",
".",
"generate_private_key",
"(",
"backend",
"=",
"default_backend",
"(",
")",
",",
"public_exponent",
"=",
"65537",
",",
"key_size",
"=",
"4096",
")",
"private_key",
"=",
"key",
".",
"private_bytes",
"(",
"serialization",
".",
"Encoding",
".",
"PEM",
",",
"serialization",
".",
"PrivateFormat",
".",
"PKCS8",
",",
"serialization",
".",
"NoEncryption",
"(",
")",
")",
"public_key",
"=",
"key",
".",
"public_key",
"(",
")",
".",
"public_bytes",
"(",
"serialization",
".",
"Encoding",
".",
"OpenSSH",
",",
"serialization",
".",
"PublicFormat",
".",
"OpenSSH",
")",
"return",
"private_key",
",",
"public_key"
] | Generates an SSH deploy public and private key.
Returns (private key, public key), a tuple of byte strings. | [
"Generates",
"an",
"SSH",
"deploy",
"public",
"and",
"private",
"key",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/local.py#L314-L335 | train |
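A sketch combining this record with ``upload_GitHub_deploy_key`` above: generate a key pair, keep the private half locally, and upload the public half (``'user/repo'`` is a placeholder, not a real target):

    from doctr.local import (GitHub_login, generate_ssh_key,
                             upload_GitHub_deploy_key)

    private_key, public_key = generate_ssh_key()
    with open('github_deploy_key', 'wb') as f:
        f.write(private_key)

    login_kwargs = GitHub_login()
    upload_GitHub_deploy_key('user/repo', public_key.decode('utf-8'),
                             read_only=False, **login_kwargs)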
drdoctr/doctr | doctr/local.py | guess_github_repo | def guess_github_repo():
"""
Guesses the github repo for the current directory
Returns False if no guess can be made.
"""
p = subprocess.run(['git', 'ls-remote', '--get-url', 'origin'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
if p.stderr or p.returncode:
return False
url = p.stdout.decode('utf-8').strip()
m = GIT_URL.fullmatch(url)
if not m:
return False
return m.group(1) | python | def guess_github_repo():
"""
Guesses the github repo for the current directory
Returns False if no guess can be made.
"""
p = subprocess.run(['git', 'ls-remote', '--get-url', 'origin'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
if p.stderr or p.returncode:
return False
url = p.stdout.decode('utf-8').strip()
m = GIT_URL.fullmatch(url)
if not m:
return False
return m.group(1) | [
"def",
"guess_github_repo",
"(",
")",
":",
"p",
"=",
"subprocess",
".",
"run",
"(",
"[",
"'git'",
",",
"'ls-remote'",
",",
"'--get-url'",
",",
"'origin'",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
",",
"check",
"=",
"False",
")",
"if",
"p",
".",
"stderr",
"or",
"p",
".",
"returncode",
":",
"return",
"False",
"url",
"=",
"p",
".",
"stdout",
".",
"decode",
"(",
"'utf-8'",
")",
".",
"strip",
"(",
")",
"m",
"=",
"GIT_URL",
".",
"fullmatch",
"(",
"url",
")",
"if",
"not",
"m",
":",
"return",
"False",
"return",
"m",
".",
"group",
"(",
"1",
")"
] | Guesses the github repo for the current directory
Returns False if no guess can be made. | [
"Guesses",
"the",
"github",
"repo",
"for",
"the",
"current",
"directory"
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/local.py#L453-L468 | train |
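Usage is a single call; the helper shells out to ``git`` and returns ``False`` rather than raising when no guess can be made:

    from doctr.local import guess_github_repo

    repo = guess_github_repo()
    if repo is False:
        print('origin does not look like a GitHub remote')
    else:
        print(repo)  # e.g. 'drdoctr/doctr'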
drdoctr/doctr | doctr/__main__.py | get_config | def get_config():
"""
    This loads some configuration from the ``.travis.yml``, if the file is present,
    using the ``doctr`` key if present.
"""
p = Path('.travis.yml')
if not p.exists():
return {}
with p.open() as f:
travis_config = yaml.safe_load(f.read())
config = travis_config.get('doctr', {})
if not isinstance(config, dict):
raise ValueError('config is not a dict: {}'.format(config))
return config | python | def get_config():
"""
    This loads some configuration from the ``.travis.yml``, if the file is present,
    using the ``doctr`` key if present.
"""
p = Path('.travis.yml')
if not p.exists():
return {}
with p.open() as f:
travis_config = yaml.safe_load(f.read())
config = travis_config.get('doctr', {})
if not isinstance(config, dict):
raise ValueError('config is not a dict: {}'.format(config))
return config | [
"def",
"get_config",
"(",
")",
":",
"p",
"=",
"Path",
"(",
"'.travis.yml'",
")",
"if",
"not",
"p",
".",
"exists",
"(",
")",
":",
"return",
"{",
"}",
"with",
"p",
".",
"open",
"(",
")",
"as",
"f",
":",
"travis_config",
"=",
"yaml",
".",
"safe_load",
"(",
"f",
".",
"read",
"(",
")",
")",
"config",
"=",
"travis_config",
".",
"get",
"(",
"'doctr'",
",",
"{",
"}",
")",
"if",
"not",
"isinstance",
"(",
"config",
",",
"dict",
")",
":",
"raise",
"ValueError",
"(",
"'config is not a dict: {}'",
".",
"format",
"(",
"config",
")",
")",
"return",
"config"
] | This loads some configuration from the ``.travis.yml``, if the file is present,
using the ``doctr`` key if present. | [
"This",
"load",
"some",
"configuration",
"from",
"the",
".",
"travis",
".",
"yml",
"if",
"file",
"is",
"present",
"doctr",
"key",
"if",
"present",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/__main__.py#L219-L234 | train |
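A sketch of ``get_config`` against a throwaway ``.travis.yml``; the ``deploy-repo`` option name is hypothetical, chosen only to show the shape of the returned dict:

    from doctr.__main__ import get_config  # module path from this record

    with open('.travis.yml', 'w') as f:
        f.write('language: python\n'
                'doctr:\n'
                '  deploy-repo: user/repo.github.io\n')  # hypothetical option name
    get_config()  # -> {'deploy-repo': 'user/repo.github.io'}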
drdoctr/doctr | doctr/travis.py | decrypt_file | def decrypt_file(file, key):
"""
Decrypts the file ``file``.
The encrypted file is assumed to end with the ``.enc`` extension. The
decrypted file is saved to the same location without the ``.enc``
extension.
The permissions on the decrypted file are automatically set to 0o600.
    See also :func:`doctr.local.encrypt_to_file`.
"""
if not file.endswith('.enc'):
raise ValueError("%s does not end with .enc" % file)
fer = Fernet(key)
with open(file, 'rb') as f:
decrypted_file = fer.decrypt(f.read())
with open(file[:-4], 'wb') as f:
f.write(decrypted_file)
os.chmod(file[:-4], 0o600) | python | def decrypt_file(file, key):
"""
Decrypts the file ``file``.
The encrypted file is assumed to end with the ``.enc`` extension. The
decrypted file is saved to the same location without the ``.enc``
extension.
The permissions on the decrypted file are automatically set to 0o600.
    See also :func:`doctr.local.encrypt_to_file`.
"""
if not file.endswith('.enc'):
raise ValueError("%s does not end with .enc" % file)
fer = Fernet(key)
with open(file, 'rb') as f:
decrypted_file = fer.decrypt(f.read())
with open(file[:-4], 'wb') as f:
f.write(decrypted_file)
os.chmod(file[:-4], 0o600) | [
"def",
"decrypt_file",
"(",
"file",
",",
"key",
")",
":",
"if",
"not",
"file",
".",
"endswith",
"(",
"'.enc'",
")",
":",
"raise",
"ValueError",
"(",
"\"%s does not end with .enc\"",
"%",
"file",
")",
"fer",
"=",
"Fernet",
"(",
"key",
")",
"with",
"open",
"(",
"file",
",",
"'rb'",
")",
"as",
"f",
":",
"decrypted_file",
"=",
"fer",
".",
"decrypt",
"(",
"f",
".",
"read",
"(",
")",
")",
"with",
"open",
"(",
"file",
"[",
":",
"-",
"4",
"]",
",",
"'wb'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"decrypted_file",
")",
"os",
".",
"chmod",
"(",
"file",
"[",
":",
"-",
"4",
"]",
",",
"0o600",
")"
] | Decrypts the file ``file``.
The encrypted file is assumed to end with the ``.enc`` extension. The
decrypted file is saved to the same location without the ``.enc``
extension.
The permissions on the decrypted file are automatically set to 0o600.
See also :func:`doctr.local.encrypt_to_file`. | [
"Decrypts",
"the",
"file",
"file",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L23-L47 | train |
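A round-trip sketch pairing ``decrypt_file`` with ``doctr.local.encrypt_to_file``:

    from doctr.local import encrypt_to_file
    from doctr.travis import decrypt_file

    key = encrypt_to_file(b'deploy key material', 'github_deploy_key.enc')
    decrypt_file('github_deploy_key.enc', key)
    with open('github_deploy_key', 'rb') as f:
        assert f.read() == b'deploy key material'
    # the decrypted file's mode is set to 0o600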
drdoctr/doctr | doctr/travis.py | setup_deploy_key | def setup_deploy_key(keypath='github_deploy_key', key_ext='.enc', env_name='DOCTR_DEPLOY_ENCRYPTION_KEY'):
"""
Decrypts the deploy key and configures it with ssh
The key is assumed to be encrypted as keypath + key_ext, and the
encryption key is assumed to be set in the environment variable
``env_name``. If ``env_name`` is not set, it falls back to
``DOCTR_DEPLOY_ENCRYPTION_KEY`` for backwards compatibility.
If keypath + key_ext does not exist, it falls back to
``github_deploy_key.enc`` for backwards compatibility.
"""
key = os.environ.get(env_name, os.environ.get("DOCTR_DEPLOY_ENCRYPTION_KEY", None))
if not key:
raise RuntimeError("{env_name} or DOCTR_DEPLOY_ENCRYPTION_KEY environment variable is not set. Make sure you followed the instructions from 'doctr configure' properly. You may need to re-run 'doctr configure' to fix this error."
.format(env_name=env_name))
# Legacy keyfile name
if (not os.path.isfile(keypath + key_ext) and
os.path.isfile('github_deploy_key' + key_ext)):
keypath = 'github_deploy_key'
key_filename = os.path.basename(keypath)
key = key.encode('utf-8')
decrypt_file(keypath + key_ext, key)
key_path = os.path.expanduser("~/.ssh/" + key_filename)
os.makedirs(os.path.expanduser("~/.ssh"), exist_ok=True)
os.rename(keypath, key_path)
with open(os.path.expanduser("~/.ssh/config"), 'a') as f:
f.write("Host github.com"
' IdentityFile "%s"'
" LogLevel ERROR\n" % key_path)
# start ssh-agent and add key to it
# info from SSH agent has to be put into the environment
agent_info = subprocess.check_output(['ssh-agent', '-s'])
agent_info = agent_info.decode('utf-8')
agent_info = agent_info.split()
AUTH_SOCK = agent_info[0].split('=')[1][:-1]
AGENT_PID = agent_info[3].split('=')[1][:-1]
os.putenv('SSH_AUTH_SOCK', AUTH_SOCK)
os.putenv('SSH_AGENT_PID', AGENT_PID)
run(['ssh-add', os.path.expanduser('~/.ssh/' + key_filename)]) | python | def setup_deploy_key(keypath='github_deploy_key', key_ext='.enc', env_name='DOCTR_DEPLOY_ENCRYPTION_KEY'):
"""
Decrypts the deploy key and configures it with ssh
The key is assumed to be encrypted as keypath + key_ext, and the
encryption key is assumed to be set in the environment variable
``env_name``. If ``env_name`` is not set, it falls back to
``DOCTR_DEPLOY_ENCRYPTION_KEY`` for backwards compatibility.
If keypath + key_ext does not exist, it falls back to
``github_deploy_key.enc`` for backwards compatibility.
"""
key = os.environ.get(env_name, os.environ.get("DOCTR_DEPLOY_ENCRYPTION_KEY", None))
if not key:
raise RuntimeError("{env_name} or DOCTR_DEPLOY_ENCRYPTION_KEY environment variable is not set. Make sure you followed the instructions from 'doctr configure' properly. You may need to re-run 'doctr configure' to fix this error."
.format(env_name=env_name))
# Legacy keyfile name
if (not os.path.isfile(keypath + key_ext) and
os.path.isfile('github_deploy_key' + key_ext)):
keypath = 'github_deploy_key'
key_filename = os.path.basename(keypath)
key = key.encode('utf-8')
decrypt_file(keypath + key_ext, key)
key_path = os.path.expanduser("~/.ssh/" + key_filename)
os.makedirs(os.path.expanduser("~/.ssh"), exist_ok=True)
os.rename(keypath, key_path)
with open(os.path.expanduser("~/.ssh/config"), 'a') as f:
f.write("Host github.com"
' IdentityFile "%s"'
" LogLevel ERROR\n" % key_path)
# start ssh-agent and add key to it
# info from SSH agent has to be put into the environment
agent_info = subprocess.check_output(['ssh-agent', '-s'])
agent_info = agent_info.decode('utf-8')
agent_info = agent_info.split()
AUTH_SOCK = agent_info[0].split('=')[1][:-1]
AGENT_PID = agent_info[3].split('=')[1][:-1]
os.putenv('SSH_AUTH_SOCK', AUTH_SOCK)
os.putenv('SSH_AGENT_PID', AGENT_PID)
run(['ssh-add', os.path.expanduser('~/.ssh/' + key_filename)]) | [
"def",
"setup_deploy_key",
"(",
"keypath",
"=",
"'github_deploy_key'",
",",
"key_ext",
"=",
"'.enc'",
",",
"env_name",
"=",
"'DOCTR_DEPLOY_ENCRYPTION_KEY'",
")",
":",
"key",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"env_name",
",",
"os",
".",
"environ",
".",
"get",
"(",
"\"DOCTR_DEPLOY_ENCRYPTION_KEY\"",
",",
"None",
")",
")",
"if",
"not",
"key",
":",
"raise",
"RuntimeError",
"(",
"\"{env_name} or DOCTR_DEPLOY_ENCRYPTION_KEY environment variable is not set. Make sure you followed the instructions from 'doctr configure' properly. You may need to re-run 'doctr configure' to fix this error.\"",
".",
"format",
"(",
"env_name",
"=",
"env_name",
")",
")",
"# Legacy keyfile name",
"if",
"(",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"keypath",
"+",
"key_ext",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"'github_deploy_key'",
"+",
"key_ext",
")",
")",
":",
"keypath",
"=",
"'github_deploy_key'",
"key_filename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"keypath",
")",
"key",
"=",
"key",
".",
"encode",
"(",
"'utf-8'",
")",
"decrypt_file",
"(",
"keypath",
"+",
"key_ext",
",",
"key",
")",
"key_path",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"\"~/.ssh/\"",
"+",
"key_filename",
")",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"\"~/.ssh\"",
")",
",",
"exist_ok",
"=",
"True",
")",
"os",
".",
"rename",
"(",
"keypath",
",",
"key_path",
")",
"with",
"open",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"\"~/.ssh/config\"",
")",
",",
"'a'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"\"Host github.com\"",
"' IdentityFile \"%s\"'",
"\" LogLevel ERROR\\n\"",
"%",
"key_path",
")",
"# start ssh-agent and add key to it",
"# info from SSH agent has to be put into the environment",
"agent_info",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"'ssh-agent'",
",",
"'-s'",
"]",
")",
"agent_info",
"=",
"agent_info",
".",
"decode",
"(",
"'utf-8'",
")",
"agent_info",
"=",
"agent_info",
".",
"split",
"(",
")",
"AUTH_SOCK",
"=",
"agent_info",
"[",
"0",
"]",
".",
"split",
"(",
"'='",
")",
"[",
"1",
"]",
"[",
":",
"-",
"1",
"]",
"AGENT_PID",
"=",
"agent_info",
"[",
"3",
"]",
".",
"split",
"(",
"'='",
")",
"[",
"1",
"]",
"[",
":",
"-",
"1",
"]",
"os",
".",
"putenv",
"(",
"'SSH_AUTH_SOCK'",
",",
"AUTH_SOCK",
")",
"os",
".",
"putenv",
"(",
"'SSH_AGENT_PID'",
",",
"AGENT_PID",
")",
"run",
"(",
"[",
"'ssh-add'",
",",
"os",
".",
"path",
".",
"expanduser",
"(",
"'~/.ssh/'",
"+",
"key_filename",
")",
"]",
")"
] | Decrypts the deploy key and configures it with ssh
The key is assumed to be encrypted as keypath + key_ext, and the
encryption key is assumed to be set in the environment variable
``env_name``. If ``env_name`` is not set, it falls back to
``DOCTR_DEPLOY_ENCRYPTION_KEY`` for backwards compatibility.
If keypath + key_ext does not exist, it falls back to
``github_deploy_key.enc`` for backwards compatibility. | [
"Decrypts",
"the",
"deploy",
"key",
"and",
"configures",
"it",
"with",
"ssh"
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L49-L95 | train |
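The double ``os.environ.get`` fallback at the top of ``setup_deploy_key`` is the reusable part. A self-contained sketch of the same pattern; the helper name is illustrative, not part of the doctr API:

    import os

    def lookup_key(env_name, legacy='DOCTR_DEPLOY_ENCRYPTION_KEY'):
        # Prefer the repo-specific variable, fall back to the legacy
        # name, and fail loudly if neither is set.
        key = os.environ.get(env_name, os.environ.get(legacy, None))
        if not key:
            raise RuntimeError('neither %s nor %s is set' % (env_name, legacy))
        return key.encode('utf-8')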
drdoctr/doctr | doctr/travis.py | get_token | def get_token():
"""
Get the encrypted GitHub token in Travis.
Make sure the contents of this variable do not leak. The ``run()`` function
will remove this from the output, so always use it.
"""
token = os.environ.get("GH_TOKEN", None)
if not token:
token = "GH_TOKEN environment variable not set"
token = token.encode('utf-8')
return token | python | def get_token():
"""
Get the encrypted GitHub token in Travis.
Make sure the contents of this variable do not leak. The ``run()`` function
will remove this from the output, so always use it.
"""
token = os.environ.get("GH_TOKEN", None)
if not token:
token = "GH_TOKEN environment variable not set"
token = token.encode('utf-8')
return token | [
"def",
"get_token",
"(",
")",
":",
"token",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"\"GH_TOKEN\"",
",",
"None",
")",
"if",
"not",
"token",
":",
"token",
"=",
"\"GH_TOKEN environment variable not set\"",
"token",
"=",
"token",
".",
"encode",
"(",
"'utf-8'",
")",
"return",
"token"
] | Get the encrypted GitHub token in Travis.
Make sure the contents of this variable do not leak. The ``run()`` function
will remove this from the output, so always use it. | [
"Get",
"the",
"encrypted",
"GitHub",
"token",
"in",
"Travis",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L114-L125 | train |
drdoctr/doctr | doctr/travis.py | run | def run(args, shell=False, exit=True):
"""
Run the command ``args``.
Automatically hides the secret GitHub token from the output.
If shell=False (recommended for most commands), args should be a list of
strings. If shell=True, args should be a string of the command to run.
If exit=True, it exits on nonzero returncode. Otherwise it returns the
returncode.
"""
if "GH_TOKEN" in os.environ:
token = get_token()
else:
token = b''
if not shell:
command = ' '.join(map(shlex.quote, args))
else:
command = args
command = command.replace(token.decode('utf-8'), '~'*len(token))
print(blue(command))
sys.stdout.flush()
returncode = run_command_hiding_token(args, token, shell=shell)
if exit and returncode != 0:
sys.exit(red("%s failed: %s" % (command, returncode)))
return returncode | python | def run(args, shell=False, exit=True):
"""
Run the command ``args``.
Automatically hides the secret GitHub token from the output.
If shell=False (recommended for most commands), args should be a list of
strings. If shell=True, args should be a string of the command to run.
If exit=True, it exits on nonzero returncode. Otherwise it returns the
returncode.
"""
if "GH_TOKEN" in os.environ:
token = get_token()
else:
token = b''
if not shell:
command = ' '.join(map(shlex.quote, args))
else:
command = args
command = command.replace(token.decode('utf-8'), '~'*len(token))
print(blue(command))
sys.stdout.flush()
returncode = run_command_hiding_token(args, token, shell=shell)
if exit and returncode != 0:
sys.exit(red("%s failed: %s" % (command, returncode)))
return returncode | [
"def",
"run",
"(",
"args",
",",
"shell",
"=",
"False",
",",
"exit",
"=",
"True",
")",
":",
"if",
"\"GH_TOKEN\"",
"in",
"os",
".",
"environ",
":",
"token",
"=",
"get_token",
"(",
")",
"else",
":",
"token",
"=",
"b''",
"if",
"not",
"shell",
":",
"command",
"=",
"' '",
".",
"join",
"(",
"map",
"(",
"shlex",
".",
"quote",
",",
"args",
")",
")",
"else",
":",
"command",
"=",
"args",
"command",
"=",
"command",
".",
"replace",
"(",
"token",
".",
"decode",
"(",
"'utf-8'",
")",
",",
"'~'",
"*",
"len",
"(",
"token",
")",
")",
"print",
"(",
"blue",
"(",
"command",
")",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"returncode",
"=",
"run_command_hiding_token",
"(",
"args",
",",
"token",
",",
"shell",
"=",
"shell",
")",
"if",
"exit",
"and",
"returncode",
"!=",
"0",
":",
"sys",
".",
"exit",
"(",
"red",
"(",
"\"%s failed: %s\"",
"%",
"(",
"command",
",",
"returncode",
")",
")",
")",
"return",
"returncode"
] | Run the command ``args``.
Automatically hides the secret GitHub token from the output.
If shell=False (recommended for most commands), args should be a list of
strings. If shell=True, args should be a string of the command to run.
If exit=True, it exits on nonzero returncode. Otherwise it returns the
returncode. | [
"Run",
"the",
"command",
"args",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L127-L156 | train |
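The masking step in ``run`` is easy to verify in isolation. A runnable snippet with an invented token value:

    token = b's3cr3t'
    command = 'git push https://s3cr3t@github.com/org/repo.git'
    print(command.replace(token.decode('utf-8'), '~' * len(token)))
    # git push https://~~~~~~@github.com/org/repo.git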
drdoctr/doctr | doctr/travis.py | get_current_repo | def get_current_repo():
"""
Get the GitHub repo name for the current directory.
Assumes that the repo is in the ``origin`` remote.
"""
remote_url = subprocess.check_output(['git', 'config', '--get',
'remote.origin.url']).decode('utf-8')
# Travis uses the https clone url
_, org, git_repo = remote_url.rsplit('.git', 1)[0].rsplit('/', 2)
return (org + '/' + git_repo) | python | def get_current_repo():
"""
Get the GitHub repo name for the current directory.
Assumes that the repo is in the ``origin`` remote.
"""
remote_url = subprocess.check_output(['git', 'config', '--get',
'remote.origin.url']).decode('utf-8')
# Travis uses the https clone url
_, org, git_repo = remote_url.rsplit('.git', 1)[0].rsplit('/', 2)
return (org + '/' + git_repo) | [
"def",
"get_current_repo",
"(",
")",
":",
"remote_url",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"'git'",
",",
"'config'",
",",
"'--get'",
",",
"'remote.origin.url'",
"]",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"# Travis uses the https clone url",
"_",
",",
"org",
",",
"git_repo",
"=",
"remote_url",
".",
"rsplit",
"(",
"'.git'",
",",
"1",
")",
"[",
"0",
"]",
".",
"rsplit",
"(",
"'/'",
",",
"2",
")",
"return",
"(",
"org",
"+",
"'/'",
"+",
"git_repo",
")"
] | Get the GitHub repo name for the current directory.
Assumes that the repo is in the ``origin`` remote. | [
"Get",
"the",
"GitHub",
"repo",
"name",
"for",
"the",
"current",
"directory",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L158-L169 | train |
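A runnable check of the two-step ``rsplit`` parse, using the HTTPS clone URL form that the code comment says Travis provides:

    remote_url = 'https://github.com/drdoctr/doctr.git'
    _, org, git_repo = remote_url.rsplit('.git', 1)[0].rsplit('/', 2)
    print(org + '/' + git_repo)  # drdoctr/doctr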
drdoctr/doctr | doctr/travis.py | get_travis_branch | def get_travis_branch():
"""Get the name of the branch that the PR is from.
Note that this is not simply ``$TRAVIS_BRANCH``. The ``push`` build will
use the correct branch (the branch that the PR is from) but the ``pr``
build will use the _target_ of the PR (usually master). So instead, we ask
for ``$TRAVIS_PULL_REQUEST_BRANCH`` if it's a PR build, and
``$TRAVIS_BRANCH`` if it's a push build.
"""
if os.environ.get("TRAVIS_PULL_REQUEST", "") == "true":
return os.environ.get("TRAVIS_PULL_REQUEST_BRANCH", "")
else:
return os.environ.get("TRAVIS_BRANCH", "") | python | def get_travis_branch():
"""Get the name of the branch that the PR is from.
Note that this is not simply ``$TRAVIS_BRANCH``. The ``push`` build will
use the correct branch (the branch that the PR is from) but the ``pr``
build will use the _target_ of the PR (usually master). So instead, we ask
for ``$TRAVIS_PULL_REQUEST_BRANCH`` if it's a PR build, and
``$TRAVIS_BRANCH`` if it's a push build.
"""
if os.environ.get("TRAVIS_PULL_REQUEST", "") == "true":
return os.environ.get("TRAVIS_PULL_REQUEST_BRANCH", "")
else:
return os.environ.get("TRAVIS_BRANCH", "") | [
"def",
"get_travis_branch",
"(",
")",
":",
"if",
"os",
".",
"environ",
".",
"get",
"(",
"\"TRAVIS_PULL_REQUEST\"",
",",
"\"\"",
")",
"==",
"\"true\"",
":",
"return",
"os",
".",
"environ",
".",
"get",
"(",
"\"TRAVIS_PULL_REQUEST_BRANCH\"",
",",
"\"\"",
")",
"else",
":",
"return",
"os",
".",
"environ",
".",
"get",
"(",
"\"TRAVIS_BRANCH\"",
",",
"\"\"",
")"
] | Get the name of the branch that the PR is from.
Note that this is not simply ``$TRAVIS_BRANCH``. The ``push`` build will
use the correct branch (the branch that the PR is from) but the ``pr``
build will use the _target_ of the PR (usually master). So instead, we ask
for ``$TRAVIS_PULL_REQUEST_BRANCH`` if it's a PR build, and
``$TRAVIS_BRANCH`` if it's a push build. | [
"Get",
"the",
"name",
"of",
"the",
"branch",
"that",
"the",
"PR",
"is",
"from",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L171-L183 | train |
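The branch-selection logic can be simulated without Travis by setting the two variables it reads; the snippet below is a self-contained restatement of the function body:

    import os

    os.environ['TRAVIS_PULL_REQUEST'] = 'true'
    os.environ['TRAVIS_PULL_REQUEST_BRANCH'] = 'feature-x'
    is_pr = os.environ.get('TRAVIS_PULL_REQUEST', '') == 'true'
    var = 'TRAVIS_PULL_REQUEST_BRANCH' if is_pr else 'TRAVIS_BRANCH'
    print(os.environ.get(var, ''))  # feature-x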
drdoctr/doctr | doctr/travis.py | set_git_user_email | def set_git_user_email():
"""
Set global user and email for git user if not already present on system
"""
username = subprocess.run(shlex.split('git config user.name'), stdout=subprocess.PIPE).stdout.strip().decode('utf-8')
if not username or username == "Travis CI User":
run(['git', 'config', '--global', 'user.name', "Doctr (Travis CI)"])
else:
print("Not setting git user name, as it's already set to %r" % username)
email = subprocess.run(shlex.split('git config user.email'), stdout=subprocess.PIPE).stdout.strip().decode('utf-8')
if not email or email == "travis@example.org":
# We need a dummy email or git will fail. We use this one as per
# https://help.github.com/articles/keeping-your-email-address-private/.
run(['git', 'config', '--global', 'user.email', 'drdoctr@users.noreply.github.com'])
else:
print("Not setting git user email, as it's already set to %r" % email) | python | def set_git_user_email():
"""
Set global user and email for git user if not already present on system
"""
username = subprocess.run(shlex.split('git config user.name'), stdout=subprocess.PIPE).stdout.strip().decode('utf-8')
if not username or username == "Travis CI User":
run(['git', 'config', '--global', 'user.name', "Doctr (Travis CI)"])
else:
print("Not setting git user name, as it's already set to %r" % username)
email = subprocess.run(shlex.split('git config user.email'), stdout=subprocess.PIPE).stdout.strip().decode('utf-8')
if not email or email == "travis@example.org":
# We need a dummy email or git will fail. We use this one as per
# https://help.github.com/articles/keeping-your-email-address-private/.
run(['git', 'config', '--global', 'user.email', 'drdoctr@users.noreply.github.com'])
else:
print("Not setting git user email, as it's already set to %r" % email) | [
"def",
"set_git_user_email",
"(",
")",
":",
"username",
"=",
"subprocess",
".",
"run",
"(",
"shlex",
".",
"split",
"(",
"'git config user.name'",
")",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
")",
".",
"stdout",
".",
"strip",
"(",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"if",
"not",
"username",
"or",
"username",
"==",
"\"Travis CI User\"",
":",
"run",
"(",
"[",
"'git'",
",",
"'config'",
",",
"'--global'",
",",
"'user.name'",
",",
"\"Doctr (Travis CI)\"",
"]",
")",
"else",
":",
"print",
"(",
"\"Not setting git user name, as it's already set to %r\"",
"%",
"username",
")",
"email",
"=",
"subprocess",
".",
"run",
"(",
"shlex",
".",
"split",
"(",
"'git config user.email'",
")",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
")",
".",
"stdout",
".",
"strip",
"(",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"if",
"not",
"email",
"or",
"email",
"==",
"\"travis@example.org\"",
":",
"# We need a dummy email or git will fail. We use this one as per",
"# https://help.github.com/articles/keeping-your-email-address-private/.",
"run",
"(",
"[",
"'git'",
",",
"'config'",
",",
"'--global'",
",",
"'user.email'",
",",
"'drdoctr@users.noreply.github.com'",
"]",
")",
"else",
":",
"print",
"(",
"\"Not setting git user email, as it's already set to %r\"",
"%",
"email",
")"
] | Set global user and email for git user if not already present on system | [
"Set",
"global",
"user",
"and",
"email",
"for",
"git",
"user",
"if",
"not",
"already",
"present",
"on",
"system"
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L272-L288 | train |
drdoctr/doctr | doctr/travis.py | checkout_deploy_branch | def checkout_deploy_branch(deploy_branch, canpush=True):
"""
Checkout the deploy branch, creating it if it doesn't exist.
"""
# Create an empty branch with .nojekyll if it doesn't already exist
create_deploy_branch(deploy_branch, push=canpush)
remote_branch = "doctr_remote/{}".format(deploy_branch)
print("Checking out doctr working branch tracking", remote_branch)
clear_working_branch()
# If gh-pages doesn't exist, the above create_deploy_branch() will create
# it if we can push; but if we can't, it won't, and the --track would fail.
if run(['git', 'rev-parse', '--verify', remote_branch], exit=False) == 0:
extra_args = ['--track', remote_branch]
else:
extra_args = []
run(['git', 'checkout', '-b', DOCTR_WORKING_BRANCH] + extra_args)
print("Done")
return canpush | python | def checkout_deploy_branch(deploy_branch, canpush=True):
"""
Checkout the deploy branch, creating it if it doesn't exist.
"""
# Create an empty branch with .nojekyll if it doesn't already exist
create_deploy_branch(deploy_branch, push=canpush)
remote_branch = "doctr_remote/{}".format(deploy_branch)
print("Checking out doctr working branch tracking", remote_branch)
clear_working_branch()
# If gh-pages doesn't exist, the above create_deploy_branch() will create
# it if we can push; but if we can't, it won't, and the --track would fail.
if run(['git', 'rev-parse', '--verify', remote_branch], exit=False) == 0:
extra_args = ['--track', remote_branch]
else:
extra_args = []
run(['git', 'checkout', '-b', DOCTR_WORKING_BRANCH] + extra_args)
print("Done")
return canpush | [
"def",
"checkout_deploy_branch",
"(",
"deploy_branch",
",",
"canpush",
"=",
"True",
")",
":",
"# Create an empty branch with .nojekyll if it doesn't already exist",
"create_deploy_branch",
"(",
"deploy_branch",
",",
"push",
"=",
"canpush",
")",
"remote_branch",
"=",
"\"doctr_remote/{}\"",
".",
"format",
"(",
"deploy_branch",
")",
"print",
"(",
"\"Checking out doctr working branch tracking\"",
",",
"remote_branch",
")",
"clear_working_branch",
"(",
")",
"# If gh-pages doesn't exist the above create_deploy_branch() will create",
"# it we can push, but if we can't, it won't and the --track would fail.",
"if",
"run",
"(",
"[",
"'git'",
",",
"'rev-parse'",
",",
"'--verify'",
",",
"remote_branch",
"]",
",",
"exit",
"=",
"False",
")",
"==",
"0",
":",
"extra_args",
"=",
"[",
"'--track'",
",",
"remote_branch",
"]",
"else",
":",
"extra_args",
"=",
"[",
"]",
"run",
"(",
"[",
"'git'",
",",
"'checkout'",
",",
"'-b'",
",",
"DOCTR_WORKING_BRANCH",
"]",
"+",
"extra_args",
")",
"print",
"(",
"\"Done\"",
")",
"return",
"canpush"
] | Checkout the deploy branch, creating it if it doesn't exist. | [
"Checkout",
"the",
"deploy",
"branch",
"creating",
"it",
"if",
"it",
"doesn",
"t",
"exist",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L290-L308 | train |
drdoctr/doctr | doctr/travis.py | deploy_branch_exists | def deploy_branch_exists(deploy_branch):
"""
Check if there is a remote branch with name specified in ``deploy_branch``.
Note that default ``deploy_branch`` is ``gh-pages`` for regular repos and
``master`` for ``github.io`` repos.
This isn't completely robust. If there are multiple remotes and you have a
``deploy_branch`` branch on the non-default remote, this won't see it.
"""
remote_name = 'doctr_remote'
branch_names = subprocess.check_output(['git', 'branch', '-r']).decode('utf-8').split()
return '{}/{}'.format(remote_name, deploy_branch) in branch_names | python | def deploy_branch_exists(deploy_branch):
"""
Check if there is a remote branch with name specified in ``deploy_branch``.
Note that default ``deploy_branch`` is ``gh-pages`` for regular repos and
``master`` for ``github.io`` repos.
This isn't completely robust. If there are multiple remotes and you have a
``deploy_branch`` branch on the non-default remote, this won't see it.
"""
remote_name = 'doctr_remote'
branch_names = subprocess.check_output(['git', 'branch', '-r']).decode('utf-8').split()
return '{}/{}'.format(remote_name, deploy_branch) in branch_names | [
"def",
"deploy_branch_exists",
"(",
"deploy_branch",
")",
":",
"remote_name",
"=",
"'doctr_remote'",
"branch_names",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"'git'",
",",
"'branch'",
",",
"'-r'",
"]",
")",
".",
"decode",
"(",
"'utf-8'",
")",
".",
"split",
"(",
")",
"return",
"'{}/{}'",
".",
"format",
"(",
"remote_name",
",",
"deploy_branch",
")",
"in",
"branch_names"
] | Check if there is a remote branch with name specified in ``deploy_branch``.
Note that default ``deploy_branch`` is ``gh-pages`` for regular repos and
``master`` for ``github.io`` repos.
This isn't completely robust. If there are multiple remotes and you have a
``deploy_branch`` branch on the non-default remote, this won't see it. | [
"Check",
"if",
"there",
"is",
"a",
"remote",
"branch",
"with",
"name",
"specified",
"in",
"deploy_branch",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L315-L328 | train |
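Because the output of ``git branch -r`` is split on whitespace, membership is tested against exact ``remote/branch`` strings. A self-contained illustration with canned output:

    output = '  doctr_remote/gh-pages\n  origin/master\n'
    branch_names = output.split()
    print('doctr_remote/gh-pages' in branch_names)  # True
    print('gh-pages' in branch_names)               # False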
drdoctr/doctr | doctr/travis.py | create_deploy_branch | def create_deploy_branch(deploy_branch, push=True):
"""
If there is no remote branch with name specified in ``deploy_branch``,
create one.
Note that default ``deploy_branch`` is ``gh-pages`` for regular
repos and ``master`` for ``github.io`` repos.
Return True if ``deploy_branch`` was created, False if not.
"""
if not deploy_branch_exists(deploy_branch):
print("Creating {} branch on doctr_remote".format(deploy_branch))
clear_working_branch()
run(['git', 'checkout', '--orphan', DOCTR_WORKING_BRANCH])
# delete everything in the new ref. this is non-destructive to existing
# refs/branches, etc...
run(['git', 'rm', '-rf', '.'])
print("Adding .nojekyll file to working branch")
run(['touch', '.nojekyll'])
run(['git', 'add', '.nojekyll'])
run(['git', 'commit', '-m', 'Create new {} branch with .nojekyll'.format(deploy_branch)])
if push:
print("Pushing working branch to remote {} branch".format(deploy_branch))
run(['git', 'push', '-u', 'doctr_remote', '{}:{}'.format(DOCTR_WORKING_BRANCH, deploy_branch)])
# return to master branch and clear the working branch
run(['git', 'checkout', 'master'])
run(['git', 'branch', '-D', DOCTR_WORKING_BRANCH])
# fetch the remote so that doctr_remote/{deploy_branch} is resolved
run(['git', 'fetch', 'doctr_remote'])
return True
return False | python | def create_deploy_branch(deploy_branch, push=True):
"""
If there is no remote branch with name specified in ``deploy_branch``,
create one.
Note that default ``deploy_branch`` is ``gh-pages`` for regular
repos and ``master`` for ``github.io`` repos.
Return True if ``deploy_branch`` was created, False if not.
"""
if not deploy_branch_exists(deploy_branch):
print("Creating {} branch on doctr_remote".format(deploy_branch))
clear_working_branch()
run(['git', 'checkout', '--orphan', DOCTR_WORKING_BRANCH])
# delete everything in the new ref. this is non-destructive to existing
# refs/branches, etc...
run(['git', 'rm', '-rf', '.'])
print("Adding .nojekyll file to working branch")
run(['touch', '.nojekyll'])
run(['git', 'add', '.nojekyll'])
run(['git', 'commit', '-m', 'Create new {} branch with .nojekyll'.format(deploy_branch)])
if push:
print("Pushing working branch to remote {} branch".format(deploy_branch))
run(['git', 'push', '-u', 'doctr_remote', '{}:{}'.format(DOCTR_WORKING_BRANCH, deploy_branch)])
# return to master branch and clear the working branch
run(['git', 'checkout', 'master'])
run(['git', 'branch', '-D', DOCTR_WORKING_BRANCH])
# fetch the remote so that doctr_remote/{deploy_branch} is resolved
run(['git', 'fetch', 'doctr_remote'])
return True
return False | [
"def",
"create_deploy_branch",
"(",
"deploy_branch",
",",
"push",
"=",
"True",
")",
":",
"if",
"not",
"deploy_branch_exists",
"(",
"deploy_branch",
")",
":",
"print",
"(",
"\"Creating {} branch on doctr_remote\"",
".",
"format",
"(",
"deploy_branch",
")",
")",
"clear_working_branch",
"(",
")",
"run",
"(",
"[",
"'git'",
",",
"'checkout'",
",",
"'--orphan'",
",",
"DOCTR_WORKING_BRANCH",
"]",
")",
"# delete everything in the new ref. this is non-destructive to existing",
"# refs/branches, etc...",
"run",
"(",
"[",
"'git'",
",",
"'rm'",
",",
"'-rf'",
",",
"'.'",
"]",
")",
"print",
"(",
"\"Adding .nojekyll file to working branch\"",
")",
"run",
"(",
"[",
"'touch'",
",",
"'.nojekyll'",
"]",
")",
"run",
"(",
"[",
"'git'",
",",
"'add'",
",",
"'.nojekyll'",
"]",
")",
"run",
"(",
"[",
"'git'",
",",
"'commit'",
",",
"'-m'",
",",
"'Create new {} branch with .nojekyll'",
".",
"format",
"(",
"deploy_branch",
")",
"]",
")",
"if",
"push",
":",
"print",
"(",
"\"Pushing working branch to remote {} branch\"",
".",
"format",
"(",
"deploy_branch",
")",
")",
"run",
"(",
"[",
"'git'",
",",
"'push'",
",",
"'-u'",
",",
"'doctr_remote'",
",",
"'{}:{}'",
".",
"format",
"(",
"DOCTR_WORKING_BRANCH",
",",
"deploy_branch",
")",
"]",
")",
"# return to master branch and clear the working branch",
"run",
"(",
"[",
"'git'",
",",
"'checkout'",
",",
"'master'",
"]",
")",
"run",
"(",
"[",
"'git'",
",",
"'branch'",
",",
"'-D'",
",",
"DOCTR_WORKING_BRANCH",
"]",
")",
"# fetch the remote so that doctr_remote/{deploy_branch} is resolved",
"run",
"(",
"[",
"'git'",
",",
"'fetch'",
",",
"'doctr_remote'",
"]",
")",
"return",
"True",
"return",
"False"
] | If there is no remote branch with name specified in ``deploy_branch``,
create one.
Note that default ``deploy_branch`` is ``gh-pages`` for regular
repos and ``master`` for ``github.io`` repos.
Return True if ``deploy_branch`` was created, False if not. | [
"If",
"there",
"is",
"no",
"remote",
"branch",
"with",
"name",
"specified",
"in",
"deploy_branch",
"create",
"one",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L330-L361 | train |
drdoctr/doctr | doctr/travis.py | find_sphinx_build_dir | def find_sphinx_build_dir():
"""
Find build subfolder within sphinx docs directory.
This is called by :func:`commit_docs` if keyword arg ``built_docs`` is not
specified on the command line.
"""
build = glob.glob('**/*build/html', recursive=True)
if not build:
raise RuntimeError("Could not find Sphinx build directory automatically")
build_folder = build[0]
return build_folder | python | def find_sphinx_build_dir():
"""
Find build subfolder within sphinx docs directory.
This is called by :func:`commit_docs` if keyword arg ``built_docs`` is not
specified on the command line.
"""
build = glob.glob('**/*build/html', recursive=True)
if not build:
raise RuntimeError("Could not find Sphinx build directory automatically")
build_folder = build[0]
return build_folder | [
"def",
"find_sphinx_build_dir",
"(",
")",
":",
"build",
"=",
"glob",
".",
"glob",
"(",
"'**/*build/html'",
",",
"recursive",
"=",
"True",
")",
"if",
"not",
"build",
":",
"raise",
"RuntimeError",
"(",
"\"Could not find Sphinx build directory automatically\"",
")",
"build_folder",
"=",
"build",
"[",
"0",
"]",
"return",
"build_folder"
] | Find build subfolder within sphinx docs directory.
This is called by :func:`commit_docs` if keyword arg ``built_docs`` is not
specified on the command line. | [
"Find",
"build",
"subfolder",
"within",
"sphinx",
"docs",
"directory",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L363-L375 | train |
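The pattern matches any ``*build/html`` directory at any depth, so a stock sphinx-quickstart layout is found with no configuration. A short sketch; the example result assumes a hypothetical ``docs/_build`` tree:

    import glob

    build = glob.glob('**/*build/html', recursive=True)
    # e.g. ['docs/_build/html']; the function raises if the list is empty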
drdoctr/doctr | doctr/travis.py | copy_to_tmp | def copy_to_tmp(source):
"""
Copies ``source`` to a temporary directory, and returns the copied
location.
If source is a file, the copied location is also a file.
"""
tmp_dir = tempfile.mkdtemp()
# Use pathlib because os.path.basename is different depending on whether
# the path ends in a /
p = pathlib.Path(source)
dirname = p.name or 'temp'
new_dir = os.path.join(tmp_dir, dirname)
if os.path.isdir(source):
shutil.copytree(source, new_dir)
else:
shutil.copy2(source, new_dir)
return new_dir | python | def copy_to_tmp(source):
"""
Copies ``source`` to a temporary directory, and returns the copied
location.
If source is a file, the copied location is also a file.
"""
tmp_dir = tempfile.mkdtemp()
# Use pathlib because os.path.basename is different depending on whether
# the path ends in a /
p = pathlib.Path(source)
dirname = p.name or 'temp'
new_dir = os.path.join(tmp_dir, dirname)
if os.path.isdir(source):
shutil.copytree(source, new_dir)
else:
shutil.copy2(source, new_dir)
return new_dir | [
"def",
"copy_to_tmp",
"(",
"source",
")",
":",
"tmp_dir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"# Use pathlib because os.path.basename is different depending on whether",
"# the path ends in a /",
"p",
"=",
"pathlib",
".",
"Path",
"(",
"source",
")",
"dirname",
"=",
"p",
".",
"name",
"or",
"'temp'",
"new_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tmp_dir",
",",
"dirname",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"source",
")",
":",
"shutil",
".",
"copytree",
"(",
"source",
",",
"new_dir",
")",
"else",
":",
"shutil",
".",
"copy2",
"(",
"source",
",",
"new_dir",
")",
"return",
"new_dir"
] | Copies ``source`` to a temporary directory, and returns the copied
location.
If source is a file, the copied location is also a file. | [
"Copies",
"source",
"to",
"a",
"temporary",
"directory",
"and",
"returns",
"the",
"copied",
"location",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L383-L400 | train |
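The pathlib detail called out in the code comment is easy to confirm: ``Path(...).name`` ignores a trailing separator, while ``os.path.basename`` returns an empty string for it. Runnable:

    import os
    import pathlib

    print(pathlib.Path('docs/_build/html/').name)  # html
    print(os.path.basename('docs/_build/html/'))   # '' (empty string)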
drdoctr/doctr | doctr/travis.py | is_subdir | def is_subdir(a, b):
"""
Return true if a is a subdirectory of b
"""
a, b = map(os.path.abspath, [a, b])
return os.path.commonpath([a, b]) == b | python | def is_subdir(a, b):
"""
Return true if a is a subdirectory of b
"""
a, b = map(os.path.abspath, [a, b])
return os.path.commonpath([a, b]) == b | [
"def",
"is_subdir",
"(",
"a",
",",
"b",
")",
":",
"a",
",",
"b",
"=",
"map",
"(",
"os",
".",
"path",
".",
"abspath",
",",
"[",
"a",
",",
"b",
"]",
")",
"return",
"os",
".",
"path",
".",
"commonpath",
"(",
"[",
"a",
",",
"b",
"]",
")",
"==",
"b"
] | Return true if a is a subdirectory of b | [
"Return",
"true",
"if",
"a",
"is",
"a",
"subdirectory",
"of",
"b"
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L402-L408 | train |
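Absolutizing both paths before comparing with ``os.path.commonpath`` avoids the classic string-prefix bug. A runnable check:

    import os

    a, b = map(os.path.abspath, ['site/js/vendor', 'site'])
    print(os.path.commonpath([a, b]) == b)  # True

    a, b = map(os.path.abspath, ['site-js', 'site'])
    print(os.path.commonpath([a, b]) == b)  # False, despite the shared prefix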
drdoctr/doctr | doctr/travis.py | sync_from_log | def sync_from_log(src, dst, log_file, exclude=()):
"""
Sync the files in ``src`` to ``dst``.
The files that are synced are logged to ``log_file``. If ``log_file``
exists, the files in ``log_file`` are removed first.
Returns ``(added, removed)``, where added is a list of all files synced from
``src`` (even if they already existed in ``dst``), and ``removed`` is every
file from ``log_file`` that was removed from ``dst`` because it wasn't in
``src``. ``added`` also includes the log file.
``exclude`` may be a list of paths from ``src`` that should be ignored.
Such paths are neither added nor removed, even if they are in the logfile.
"""
from os.path import join, exists, isdir
exclude = [os.path.normpath(i) for i in exclude]
added, removed = [], []
if not exists(log_file):
# Assume this is the first run
print("%s doesn't exist. Not removing any files." % log_file)
else:
with open(log_file) as f:
files = f.read().strip().split('\n')
for new_f in files:
new_f = new_f.strip()
if any(is_subdir(new_f, os.path.join(dst, i)) for i in exclude):
pass
elif exists(new_f):
os.remove(new_f)
removed.append(new_f)
else:
print("Warning: File %s doesn't exist." % new_f, file=sys.stderr)
if os.path.isdir(src):
if not src.endswith(os.sep):
src += os.sep
files = glob.iglob(join(src, '**'), recursive=True)
else:
files = [src]
src = os.path.dirname(src) + os.sep if os.sep in src else ''
os.makedirs(dst, exist_ok=True)
# sorted makes this easier to test
for f in sorted(files):
if any(is_subdir(f, os.path.join(src, i)) for i in exclude):
continue
new_f = join(dst, f[len(src):])
if isdir(f) or f.endswith(os.sep):
os.makedirs(new_f, exist_ok=True)
else:
shutil.copy2(f, new_f)
added.append(new_f)
if new_f in removed:
removed.remove(new_f)
with open(log_file, 'w') as f:
f.write('\n'.join(added))
added.append(log_file)
return added, removed | python | def sync_from_log(src, dst, log_file, exclude=()):
"""
Sync the files in ``src`` to ``dst``.
The files that are synced are logged to ``log_file``. If ``log_file``
exists, the files in ``log_file`` are removed first.
Returns ``(added, removed)``, where added is a list of all files synced from
``src`` (even if they already existed in ``dst``), and ``removed`` is every
file from ``log_file`` that was removed from ``dst`` because it wasn't in
``src``. ``added`` also includes the log file.
``exclude`` may be a list of paths from ``src`` that should be ignored.
Such paths are neither added nor removed, even if they are in the logfile.
"""
from os.path import join, exists, isdir
exclude = [os.path.normpath(i) for i in exclude]
added, removed = [], []
if not exists(log_file):
# Assume this is the first run
print("%s doesn't exist. Not removing any files." % log_file)
else:
with open(log_file) as f:
files = f.read().strip().split('\n')
for new_f in files:
new_f = new_f.strip()
if any(is_subdir(new_f, os.path.join(dst, i)) for i in exclude):
pass
elif exists(new_f):
os.remove(new_f)
removed.append(new_f)
else:
print("Warning: File %s doesn't exist." % new_f, file=sys.stderr)
if os.path.isdir(src):
if not src.endswith(os.sep):
src += os.sep
files = glob.iglob(join(src, '**'), recursive=True)
else:
files = [src]
src = os.path.dirname(src) + os.sep if os.sep in src else ''
os.makedirs(dst, exist_ok=True)
# sorted makes this easier to test
for f in sorted(files):
if any(is_subdir(f, os.path.join(src, i)) for i in exclude):
continue
new_f = join(dst, f[len(src):])
if isdir(f) or f.endswith(os.sep):
os.makedirs(new_f, exist_ok=True)
else:
shutil.copy2(f, new_f)
added.append(new_f)
if new_f in removed:
removed.remove(new_f)
with open(log_file, 'w') as f:
f.write('\n'.join(added))
added.append(log_file)
return added, removed | [
"def",
"sync_from_log",
"(",
"src",
",",
"dst",
",",
"log_file",
",",
"exclude",
"=",
"(",
")",
")",
":",
"from",
"os",
".",
"path",
"import",
"join",
",",
"exists",
",",
"isdir",
"exclude",
"=",
"[",
"os",
".",
"path",
".",
"normpath",
"(",
"i",
")",
"for",
"i",
"in",
"exclude",
"]",
"added",
",",
"removed",
"=",
"[",
"]",
",",
"[",
"]",
"if",
"not",
"exists",
"(",
"log_file",
")",
":",
"# Assume this is the first run",
"print",
"(",
"\"%s doesn't exist. Not removing any files.\"",
"%",
"log_file",
")",
"else",
":",
"with",
"open",
"(",
"log_file",
")",
"as",
"f",
":",
"files",
"=",
"f",
".",
"read",
"(",
")",
".",
"strip",
"(",
")",
".",
"split",
"(",
"'\\n'",
")",
"for",
"new_f",
"in",
"files",
":",
"new_f",
"=",
"new_f",
".",
"strip",
"(",
")",
"if",
"any",
"(",
"is_subdir",
"(",
"new_f",
",",
"os",
".",
"path",
".",
"join",
"(",
"dst",
",",
"i",
")",
")",
"for",
"i",
"in",
"exclude",
")",
":",
"pass",
"elif",
"exists",
"(",
"new_f",
")",
":",
"os",
".",
"remove",
"(",
"new_f",
")",
"removed",
".",
"append",
"(",
"new_f",
")",
"else",
":",
"print",
"(",
"\"Warning: File %s doesn't exist.\"",
"%",
"new_f",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"src",
")",
":",
"if",
"not",
"src",
".",
"endswith",
"(",
"os",
".",
"sep",
")",
":",
"src",
"+=",
"os",
".",
"sep",
"files",
"=",
"glob",
".",
"iglob",
"(",
"join",
"(",
"src",
",",
"'**'",
")",
",",
"recursive",
"=",
"True",
")",
"else",
":",
"files",
"=",
"[",
"src",
"]",
"src",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"src",
")",
"+",
"os",
".",
"sep",
"if",
"os",
".",
"sep",
"in",
"src",
"else",
"''",
"os",
".",
"makedirs",
"(",
"dst",
",",
"exist_ok",
"=",
"True",
")",
"# sorted makes this easier to test",
"for",
"f",
"in",
"sorted",
"(",
"files",
")",
":",
"if",
"any",
"(",
"is_subdir",
"(",
"f",
",",
"os",
".",
"path",
".",
"join",
"(",
"src",
",",
"i",
")",
")",
"for",
"i",
"in",
"exclude",
")",
":",
"continue",
"new_f",
"=",
"join",
"(",
"dst",
",",
"f",
"[",
"len",
"(",
"src",
")",
":",
"]",
")",
"if",
"isdir",
"(",
"f",
")",
"or",
"f",
".",
"endswith",
"(",
"os",
".",
"sep",
")",
":",
"os",
".",
"makedirs",
"(",
"new_f",
",",
"exist_ok",
"=",
"True",
")",
"else",
":",
"shutil",
".",
"copy2",
"(",
"f",
",",
"new_f",
")",
"added",
".",
"append",
"(",
"new_f",
")",
"if",
"new_f",
"in",
"removed",
":",
"removed",
".",
"remove",
"(",
"new_f",
")",
"with",
"open",
"(",
"log_file",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"'\\n'",
".",
"join",
"(",
"added",
")",
")",
"added",
".",
"append",
"(",
"log_file",
")",
"return",
"added",
",",
"removed"
] | Sync the files in ``src`` to ``dst``.
The files that are synced are logged to ``log_file``. If ``log_file``
exists, the files in ``log_file`` are removed first.
Returns ``(added, removed)``, where added is a list of all files synced from
``src`` (even if they already existed in ``dst``), and ``removed`` is every
file from ``log_file`` that was removed from ``dst`` because it wasn't in
``src``. ``added`` also includes the log file.
``exclude`` may be a list of paths from ``src`` that should be ignored.
Such paths are neither added nor removed, even if they are in the logfile. | [
"Sync",
"the",
"files",
"in",
"src",
"to",
"dst",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L410-L477 | train |
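A hedged usage sketch for ``sync_from_log``; every path and the log-file name below is hypothetical, and the function is assumed importable from doctr.travis:

    from doctr.travis import sync_from_log

    added, removed = sync_from_log(
        src='docs/_build/html',   # freshly built docs
        dst='.',                  # root of the deploy-branch checkout
        log_file='.doctr-files',  # records what was synced last time
        exclude=('.nojekyll',),   # paths under src to leave untouched
    )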
drdoctr/doctr | doctr/travis.py | push_docs | def push_docs(deploy_branch='gh-pages', retries=5):
"""
Push the changes to the branch named ``deploy_branch``.
Assumes that :func:`setup_GitHub_push` has been run and returned True, and
that :func:`commit_docs` has been run. Does not push anything if no changes
were made.
"""
code = 1
while code and retries:
print("Pulling")
code = run(['git', 'pull', '-s', 'recursive', '-X', 'ours',
'doctr_remote', deploy_branch], exit=False)
print("Pushing commit")
code = run(['git', 'push', '-q', 'doctr_remote',
'{}:{}'.format(DOCTR_WORKING_BRANCH, deploy_branch)], exit=False)
if code:
retries -= 1
print("Push failed, retrying")
time.sleep(1)
else:
return
sys.exit("Giving up...") | python | def push_docs(deploy_branch='gh-pages', retries=5):
"""
Push the changes to the branch named ``deploy_branch``.
Assumes that :func:`setup_GitHub_push` has been run and returned True, and
that :func:`commit_docs` has been run. Does not push anything if no changes
were made.
"""
code = 1
while code and retries:
print("Pulling")
code = run(['git', 'pull', '-s', 'recursive', '-X', 'ours',
'doctr_remote', deploy_branch], exit=False)
print("Pushing commit")
code = run(['git', 'push', '-q', 'doctr_remote',
'{}:{}'.format(DOCTR_WORKING_BRANCH, deploy_branch)], exit=False)
if code:
retries -= 1
print("Push failed, retrying")
time.sleep(1)
else:
return
sys.exit("Giving up...") | [
"def",
"push_docs",
"(",
"deploy_branch",
"=",
"'gh-pages'",
",",
"retries",
"=",
"5",
")",
":",
"code",
"=",
"1",
"while",
"code",
"and",
"retries",
":",
"print",
"(",
"\"Pulling\"",
")",
"code",
"=",
"run",
"(",
"[",
"'git'",
",",
"'pull'",
",",
"'-s'",
",",
"'recursive'",
",",
"'-X'",
",",
"'ours'",
",",
"'doctr_remote'",
",",
"deploy_branch",
"]",
",",
"exit",
"=",
"False",
")",
"print",
"(",
"\"Pushing commit\"",
")",
"code",
"=",
"run",
"(",
"[",
"'git'",
",",
"'push'",
",",
"'-q'",
",",
"'doctr_remote'",
",",
"'{}:{}'",
".",
"format",
"(",
"DOCTR_WORKING_BRANCH",
",",
"deploy_branch",
")",
"]",
",",
"exit",
"=",
"False",
")",
"if",
"code",
":",
"retries",
"-=",
"1",
"print",
"(",
"\"Push failed, retrying\"",
")",
"time",
".",
"sleep",
"(",
"1",
")",
"else",
":",
"return",
"sys",
".",
"exit",
"(",
"\"Giving up...\"",
")"
] | Push the changes to the branch named ``deploy_branch``.
Assumes that :func:`setup_GitHub_push` has been run and returned True, and
that :func:`commit_docs` has been run. Does not push anything if no changes
were made. | [
"Push",
"the",
"changes",
"to",
"the",
"branch",
"named",
"deploy_branch",
"."
] | 0f19ff78c8239efcc98d417f36b0a31d9be01ba5 | https://github.com/drdoctr/doctr/blob/0f19ff78c8239efcc98d417f36b0a31d9be01ba5/doctr/travis.py#L536-L560 | train |
licenses/lice | lice/core.py | clean_path | def clean_path(p):
""" Clean a path by expanding user and environment variables and
ensuring absolute path.
"""
p = os.path.expanduser(p)
p = os.path.expandvars(p)
p = os.path.abspath(p)
return p | python | def clean_path(p):
""" Clean a path by expanding user and environment variables and
ensuring absolute path.
"""
p = os.path.expanduser(p)
p = os.path.expandvars(p)
p = os.path.abspath(p)
return p | [
"def",
"clean_path",
"(",
"p",
")",
":",
"p",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"p",
")",
"p",
"=",
"os",
".",
"path",
".",
"expandvars",
"(",
"p",
")",
"p",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"p",
")",
"return",
"p"
] | Clean a path by expanding user and environment variables and
ensuring absolute path. | [
"Clean",
"a",
"path",
"by",
"expanding",
"user",
"and",
"environment",
"variables",
"and",
"ensuring",
"absolute",
"path",
"."
] | 71635c2544d5edf9e93af4141467763916a86624 | https://github.com/licenses/lice/blob/71635c2544d5edf9e93af4141467763916a86624/lice/core.py#L93-L100 | train |
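A runnable demonstration of the three expansions composed by ``clean_path``, inlined in the same order (user, variables, then absolutize):

    import os

    os.environ['PROJ'] = 'myproj'  # hypothetical variable for the demo
    p = os.path.abspath(os.path.expandvars(os.path.expanduser('~/$PROJ/docs')))
    print(p)  # e.g. /home/user/myproj/docs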
licenses/lice | lice/core.py | load_file_template | def load_file_template(path):
""" Load template from the specified filesystem path.
"""
template = StringIO()
if not os.path.exists(path):
raise ValueError("path does not exist: %s" % path)
with open(clean_path(path), "rb") as infile: # opened as binary
for line in infile:
template.write(line.decode("utf-8")) # ensure utf-8
return template | python | def load_file_template(path):
""" Load template from the specified filesystem path.
"""
template = StringIO()
if not os.path.exists(path):
raise ValueError("path does not exist: %s" % path)
with open(clean_path(path), "rb") as infile: # opened as binary
for line in infile:
template.write(line.decode("utf-8")) # ensure utf-8
return template | [
"def",
"load_file_template",
"(",
"path",
")",
":",
"template",
"=",
"StringIO",
"(",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"raise",
"ValueError",
"(",
"\"path does not exist: %s\"",
"%",
"path",
")",
"with",
"open",
"(",
"clean_path",
"(",
"path",
")",
",",
"\"rb\"",
")",
"as",
"infile",
":",
"# opened as binary",
"for",
"line",
"in",
"infile",
":",
"template",
".",
"write",
"(",
"line",
".",
"decode",
"(",
"\"utf-8\"",
")",
")",
"# ensure utf-8",
"return",
"template"
] | Load template from the specified filesystem path. | [
"Load",
"template",
"from",
"the",
"specified",
"filesystem",
"path",
"."
] | 71635c2544d5edf9e93af4141467763916a86624 | https://github.com/licenses/lice/blob/71635c2544d5edf9e93af4141467763916a86624/lice/core.py#L126-L135 | train |
licenses/lice | lice/core.py | load_package_template | def load_package_template(license, header=False):
""" Load license template distributed with package.
"""
content = StringIO()
filename = 'template-%s-header.txt' if header else 'template-%s.txt'
with resource_stream(__name__, filename % license) as licfile:
for line in licfile:
content.write(line.decode("utf-8")) # write utf-8 string
return content | python | def load_package_template(license, header=False):
""" Load license template distributed with package.
"""
content = StringIO()
filename = 'template-%s-header.txt' if header else 'template-%s.txt'
with resource_stream(__name__, filename % license) as licfile:
for line in licfile:
content.write(line.decode("utf-8")) # write utf-8 string
return content | [
"def",
"load_package_template",
"(",
"license",
",",
"header",
"=",
"False",
")",
":",
"content",
"=",
"StringIO",
"(",
")",
"filename",
"=",
"'template-%s-header.txt'",
"if",
"header",
"else",
"'template-%s.txt'",
"with",
"resource_stream",
"(",
"__name__",
",",
"filename",
"%",
"license",
")",
"as",
"licfile",
":",
"for",
"line",
"in",
"licfile",
":",
"content",
".",
"write",
"(",
"line",
".",
"decode",
"(",
"\"utf-8\"",
")",
")",
"# write utf-8 string",
"return",
"content"
] | Load license template distributed with package. | [
"Load",
"license",
"template",
"distributed",
"with",
"package",
"."
] | 71635c2544d5edf9e93af4141467763916a86624 | https://github.com/licenses/lice/blob/71635c2544d5edf9e93af4141467763916a86624/lice/core.py#L138-L146 | train |
licenses/lice | lice/core.py | extract_vars | def extract_vars(template):
""" Extract variables from template. Variables are enclosed in
double curly braces.
"""
keys = set()
for match in re.finditer(r"\{\{ (?P<key>\w+) \}\}", template.getvalue()):
keys.add(match.groups()[0])
return sorted(list(keys)) | python | def extract_vars(template):
""" Extract variables from template. Variables are enclosed in
double curly braces.
"""
keys = set()
for match in re.finditer(r"\{\{ (?P<key>\w+) \}\}", template.getvalue()):
keys.add(match.groups()[0])
return sorted(list(keys)) | [
"def",
"extract_vars",
"(",
"template",
")",
":",
"keys",
"=",
"set",
"(",
")",
"for",
"match",
"in",
"re",
".",
"finditer",
"(",
"r\"\\{\\{ (?P<key>\\w+) \\}\\}\"",
",",
"template",
".",
"getvalue",
"(",
")",
")",
":",
"keys",
".",
"add",
"(",
"match",
".",
"groups",
"(",
")",
"[",
"0",
"]",
")",
"return",
"sorted",
"(",
"list",
"(",
"keys",
")",
")"
] | Extract variables from template. Variables are enclosed in
double curly braces. | [
"Extract",
"variables",
"from",
"template",
".",
"Variables",
"are",
"enclosed",
"in",
"double",
"curly",
"braces",
"."
] | 71635c2544d5edf9e93af4141467763916a86624 | https://github.com/licenses/lice/blob/71635c2544d5edf9e93af4141467763916a86624/lice/core.py#L149-L156 | train |
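The accepted variable syntax is rigid: exactly one space on each side of the key, as the regex requires. A runnable check of the extraction:

    import re

    text = 'Copyright (c) {{ year }} {{ organization }}'
    keys = {m.group('key') for m in re.finditer(r'\{\{ (?P<key>\w+) \}\}', text)}
    print(sorted(keys))  # ['organization', 'year']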
licenses/lice | lice/core.py | generate_license | def generate_license(template, context):
""" Generate a license by extracting variables from the template and
replacing them with the corresponding values in the given context.
"""
out = StringIO()
content = template.getvalue()
for key in extract_vars(template):
if key not in context:
raise ValueError("%s is missing from the template context" % key)
content = content.replace("{{ %s }}" % key, context[key])
template.close() # free template memory (when is garbage collected?)
out.write(content)
return out | python | def generate_license(template, context):
""" Generate a license by extracting variables from the template and
replacing them with the corresponding values in the given context.
"""
out = StringIO()
content = template.getvalue()
for key in extract_vars(template):
if key not in context:
raise ValueError("%s is missing from the template context" % key)
content = content.replace("{{ %s }}" % key, context[key])
template.close() # free template memory (when is garbage collected?)
out.write(content)
return out | [
"def",
"generate_license",
"(",
"template",
",",
"context",
")",
":",
"out",
"=",
"StringIO",
"(",
")",
"content",
"=",
"template",
".",
"getvalue",
"(",
")",
"for",
"key",
"in",
"extract_vars",
"(",
"template",
")",
":",
"if",
"key",
"not",
"in",
"context",
":",
"raise",
"ValueError",
"(",
"\"%s is missing from the template context\"",
"%",
"key",
")",
"content",
"=",
"content",
".",
"replace",
"(",
"\"{{ %s }}\"",
"%",
"key",
",",
"context",
"[",
"key",
"]",
")",
"template",
".",
"close",
"(",
")",
"# free template memory (when is garbage collected?)",
"out",
".",
"write",
"(",
"content",
")",
"return",
"out"
] | Generate a license by extracting variables from the template and
replacing them with the corresponding values in the given context. | [
"Generate",
"a",
"license",
"by",
"extracting",
"variables",
"from",
"the",
"template",
"and",
"replacing",
"them",
"with",
"the",
"corresponding",
"values",
"in",
"the",
"given",
"context",
"."
] | 71635c2544d5edf9e93af4141467763916a86624 | https://github.com/licenses/lice/blob/71635c2544d5edf9e93af4141467763916a86624/lice/core.py#L159-L171 | train |
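A usage sketch chaining the template helpers above; it assumes both functions are importable from lice.core:

    from io import StringIO
    from lice.core import generate_license

    template = StringIO('Copyright (c) {{ year }} {{ organization }}')
    context = {'year': '2024', 'organization': 'Example Org'}
    print(generate_license(template, context).getvalue())
    # Copyright (c) 2024 Example Org

A missing key raises ValueError, so a caller can pre-check the needed keys with ``extract_vars``.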
licenses/lice | lice/core.py | get_suffix | def get_suffix(name):
"""Check if file name have valid suffix for formatting.
if have suffix return it else return False.
"""
a = name.count(".")
if a:
ext = name.split(".")[-1]
if ext in LANGS.keys():
return ext
return False
else:
return False | python | def get_suffix(name):
"""Check if file name have valid suffix for formatting.
if have suffix return it else return False.
"""
a = name.count(".")
if a:
ext = name.split(".")[-1]
if ext in LANGS.keys():
return ext
return False
else:
return False | [
"def",
"get_suffix",
"(",
"name",
")",
":",
"a",
"=",
"name",
".",
"count",
"(",
"\".\"",
")",
"if",
"a",
":",
"ext",
"=",
"name",
".",
"split",
"(",
"\".\"",
")",
"[",
"-",
"1",
"]",
"if",
"ext",
"in",
"LANGS",
".",
"keys",
"(",
")",
":",
"return",
"ext",
"return",
"False",
"else",
":",
"return",
"False"
] | Check if the file name has a valid suffix for formatting.
If it has one, return it, else return False. | [
"Check",
"if",
"file",
"name",
"have",
"valid",
"suffix",
"for",
"formatting",
".",
"if",
"have",
"suffix",
"return",
"it",
"else",
"return",
"False",
"."
] | 71635c2544d5edf9e93af4141467763916a86624 | https://github.com/licenses/lice/blob/71635c2544d5edf9e93af4141467763916a86624/lice/core.py#L191-L202 | train |
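Two quick calls covering both branches; this assumes ``py`` is among the keys of the module-level ``LANGS`` mapping:

    print(get_suffix('main.py'))    # 'py'
    print(get_suffix('README'))     # False (no dot in the name)
    print(get_suffix('a.unknown'))  # False (dot, but not a known language)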
crate/crate-python | src/crate/client/http.py | _raise_for_status | def _raise_for_status(response):
""" make sure that only crate.exceptions are raised that are defined in
the DB-API specification """
message = ''
if 400 <= response.status < 500:
message = '%s Client Error: %s' % (response.status, response.reason)
elif 500 <= response.status < 600:
message = '%s Server Error: %s' % (response.status, response.reason)
else:
return
if response.status == 503:
raise ConnectionError(message)
if response.headers.get("content-type", "").startswith("application/json"):
data = json.loads(response.data.decode('utf-8'))
error = data.get('error', {})
error_trace = data.get('error_trace', None)
if "results" in data:
errors = [res["error_message"] for res in data["results"]
if res.get("error_message")]
if errors:
raise ProgrammingError("\n".join(errors))
if isinstance(error, dict):
raise ProgrammingError(error.get('message', ''),
error_trace=error_trace)
raise ProgrammingError(error, error_trace=error_trace)
raise ProgrammingError(message) | python | def _raise_for_status(response):
""" make sure that only crate.exceptions are raised that are defined in
the DB-API specification """
message = ''
if 400 <= response.status < 500:
message = '%s Client Error: %s' % (response.status, response.reason)
elif 500 <= response.status < 600:
message = '%s Server Error: %s' % (response.status, response.reason)
else:
return
if response.status == 503:
raise ConnectionError(message)
if response.headers.get("content-type", "").startswith("application/json"):
data = json.loads(response.data.decode('utf-8'))
error = data.get('error', {})
error_trace = data.get('error_trace', None)
if "results" in data:
errors = [res["error_message"] for res in data["results"]
if res.get("error_message")]
if errors:
raise ProgrammingError("\n".join(errors))
if isinstance(error, dict):
raise ProgrammingError(error.get('message', ''),
error_trace=error_trace)
raise ProgrammingError(error, error_trace=error_trace)
raise ProgrammingError(message) | [
"def",
"_raise_for_status",
"(",
"response",
")",
":",
"message",
"=",
"''",
"if",
"400",
"<=",
"response",
".",
"status",
"<",
"500",
":",
"message",
"=",
"'%s Client Error: %s'",
"%",
"(",
"response",
".",
"status",
",",
"response",
".",
"reason",
")",
"elif",
"500",
"<=",
"response",
".",
"status",
"<",
"600",
":",
"message",
"=",
"'%s Server Error: %s'",
"%",
"(",
"response",
".",
"status",
",",
"response",
".",
"reason",
")",
"else",
":",
"return",
"if",
"response",
".",
"status",
"==",
"503",
":",
"raise",
"ConnectionError",
"(",
"message",
")",
"if",
"response",
".",
"headers",
".",
"get",
"(",
"\"content-type\"",
",",
"\"\"",
")",
".",
"startswith",
"(",
"\"application/json\"",
")",
":",
"data",
"=",
"json",
".",
"loads",
"(",
"response",
".",
"data",
".",
"decode",
"(",
"'utf-8'",
")",
")",
"error",
"=",
"data",
".",
"get",
"(",
"'error'",
",",
"{",
"}",
")",
"error_trace",
"=",
"data",
".",
"get",
"(",
"'error_trace'",
",",
"None",
")",
"if",
"\"results\"",
"in",
"data",
":",
"errors",
"=",
"[",
"res",
"[",
"\"error_message\"",
"]",
"for",
"res",
"in",
"data",
"[",
"\"results\"",
"]",
"if",
"res",
".",
"get",
"(",
"\"error_message\"",
")",
"]",
"if",
"errors",
":",
"raise",
"ProgrammingError",
"(",
"\"\\n\"",
".",
"join",
"(",
"errors",
")",
")",
"if",
"isinstance",
"(",
"error",
",",
"dict",
")",
":",
"raise",
"ProgrammingError",
"(",
"error",
".",
"get",
"(",
"'message'",
",",
"''",
")",
",",
"error_trace",
"=",
"error_trace",
")",
"raise",
"ProgrammingError",
"(",
"error",
",",
"error_trace",
"=",
"error_trace",
")",
"raise",
"ProgrammingError",
"(",
"message",
")"
] | Make sure that only crate.exceptions defined in
the DB-API specification are raised | [
"make",
"sure",
"that",
"only",
"crate",
".",
"exceptions",
"are",
"raised",
"that",
"are",
"defined",
"in",
"the",
"DB",
"-",
"API",
"specification"
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/http.py#L164-L189 | train |
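The 503 special case can be exercised without a server by handing the function a stub response; everything below is a test double, not part of the client API:

    from crate.client.exceptions import ConnectionError  # crate's, shadows the builtin

    class FakeResponse:
        status = 503
        reason = 'Service Unavailable'
        headers = {}
        data = b'{}'

    try:
        _raise_for_status(FakeResponse())  # assumes the function is in scope
    except ConnectionError as exc:
        print(exc)  # 503 Server Error: Service Unavailable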
crate/crate-python | src/crate/client/http.py | _server_url | def _server_url(server):
"""
Normalizes a given server string to a URL
>>> print(_server_url('a'))
http://a
>>> print(_server_url('a:9345'))
http://a:9345
>>> print(_server_url('https://a:9345'))
https://a:9345
>>> print(_server_url('https://a'))
https://a
>>> print(_server_url('demo.crate.io'))
http://demo.crate.io
"""
if not _HTTP_PAT.match(server):
server = 'http://%s' % server
parsed = urlparse(server)
url = '%s://%s' % (parsed.scheme, parsed.netloc)
return url | python | def _server_url(server):
"""
Normalizes a given server string to a URL
>>> print(_server_url('a'))
http://a
>>> print(_server_url('a:9345'))
http://a:9345
>>> print(_server_url('https://a:9345'))
https://a:9345
>>> print(_server_url('https://a'))
https://a
>>> print(_server_url('demo.crate.io'))
http://demo.crate.io
"""
if not _HTTP_PAT.match(server):
server = 'http://%s' % server
parsed = urlparse(server)
url = '%s://%s' % (parsed.scheme, parsed.netloc)
return url | [
"def",
"_server_url",
"(",
"server",
")",
":",
"if",
"not",
"_HTTP_PAT",
".",
"match",
"(",
"server",
")",
":",
"server",
"=",
"'http://%s'",
"%",
"server",
"parsed",
"=",
"urlparse",
"(",
"server",
")",
"url",
"=",
"'%s://%s'",
"%",
"(",
"parsed",
".",
"scheme",
",",
"parsed",
".",
"netloc",
")",
"return",
"url"
] | Normalizes a given server string to a URL
>>> print(_server_url('a'))
http://a
>>> print(_server_url('a:9345'))
http://a:9345
>>> print(_server_url('https://a:9345'))
https://a:9345
>>> print(_server_url('https://a'))
https://a
>>> print(_server_url('demo.crate.io'))
http://demo.crate.io | [
"Normalizes",
"a",
"given",
"server",
"string",
"to",
"an",
"url"
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/http.py#L192-L211 | train |
crate/crate-python | src/crate/client/http.py | Client.sql | def sql(self, stmt, parameters=None, bulk_parameters=None):
"""
Execute SQL stmt against the crate server.
"""
if stmt is None:
return None
data = _create_sql_payload(stmt, parameters, bulk_parameters)
logger.debug(
'Sending request to %s with payload: %s', self.path, data)
content = self._json_request('POST', self.path, data=data)
logger.debug("JSON response for stmt(%s): %s", stmt, content)
return content | python | def sql(self, stmt, parameters=None, bulk_parameters=None):
"""
Execute SQL stmt against the crate server.
"""
if stmt is None:
return None
data = _create_sql_payload(stmt, parameters, bulk_parameters)
logger.debug(
'Sending request to %s with payload: %s', self.path, data)
content = self._json_request('POST', self.path, data=data)
logger.debug("JSON response for stmt(%s): %s", stmt, content)
return content | [
"def",
"sql",
"(",
"self",
",",
"stmt",
",",
"parameters",
"=",
"None",
",",
"bulk_parameters",
"=",
"None",
")",
":",
"if",
"stmt",
"is",
"None",
":",
"return",
"None",
"data",
"=",
"_create_sql_payload",
"(",
"stmt",
",",
"parameters",
",",
"bulk_parameters",
")",
"logger",
".",
"debug",
"(",
"'Sending request to %s with payload: %s'",
",",
"self",
".",
"path",
",",
"data",
")",
"content",
"=",
"self",
".",
"_json_request",
"(",
"'POST'",
",",
"self",
".",
"path",
",",
"data",
"=",
"data",
")",
"logger",
".",
"debug",
"(",
"\"JSON response for stmt(%s): %s\"",
",",
"stmt",
",",
"content",
")",
"return",
"content"
] | Execute SQL stmt against the crate server. | [
"Execute",
"SQL",
"stmt",
"against",
"the",
"crate",
"server",
"."
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/http.py#L318-L331 | train |
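In practice this method sits underneath the DB-API cursor. A typical end-to-end sketch, assuming a CrateDB node is reachable at the (hypothetical) address below:

    from crate import client

    conn = client.connect('http://localhost:4200')
    cursor = conn.cursor()
    cursor.execute('SELECT name FROM sys.cluster')
    print(cursor.fetchone())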
crate/crate-python | src/crate/client/http.py | Client.blob_put | def blob_put(self, table, digest, data):
"""
Stores the contents of the file-like @data object in a blob under the
given table and digest.
"""
response = self._request('PUT', _blob_path(table, digest),
data=data)
if response.status == 201:
# blob created
return True
if response.status == 409:
# blob exists
return False
if response.status in (400, 404):
raise BlobLocationNotFoundException(table, digest)
_raise_for_status(response) | python | def blob_put(self, table, digest, data):
"""
Stores the contents of the file-like @data object in a blob under the
given table and digest.
"""
response = self._request('PUT', _blob_path(table, digest),
data=data)
if response.status == 201:
# blob created
return True
if response.status == 409:
# blob exists
return False
if response.status in (400, 404):
raise BlobLocationNotFoundException(table, digest)
_raise_for_status(response) | [
"def",
"blob_put",
"(",
"self",
",",
"table",
",",
"digest",
",",
"data",
")",
":",
"response",
"=",
"self",
".",
"_request",
"(",
"'PUT'",
",",
"_blob_path",
"(",
"table",
",",
"digest",
")",
",",
"data",
"=",
"data",
")",
"if",
"response",
".",
"status",
"==",
"201",
":",
"# blob created",
"return",
"True",
"if",
"response",
".",
"status",
"==",
"409",
":",
"# blob exists",
"return",
"False",
"if",
"response",
".",
"status",
"in",
"(",
"400",
",",
"404",
")",
":",
"raise",
"BlobLocationNotFoundException",
"(",
"table",
",",
"digest",
")",
"_raise_for_status",
"(",
"response",
")"
] | Stores the contents of the file-like @data object in a blob under the
given table and digest. | [
"Stores",
"the",
"contents",
"of",
"the",
"file",
"like"
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/http.py#L341-L356 | train |
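A small sketch of the low-level upload path, reusing the client from the previous example and assuming an existing blob table named myblobs; blob_put expects the caller to supply the SHA-1 hex digest.

import hashlib
from io import BytesIO

data = b'hello blob'
digest = hashlib.sha1(data).hexdigest()

# True means the blob was newly created, False that it already existed.
created = client.blob_put('myblobs', digest, BytesIO(data))
print('created' if created else 'already existed')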
crate/crate-python | src/crate/client/http.py | Client.blob_get | def blob_get(self, table, digest, chunk_size=1024 * 128):
"""
Returns a file-like object representing the contents of the blob
with the given digest.
"""
response = self._request('GET', _blob_path(table, digest), stream=True)
if response.status == 404:
raise DigestNotFoundException(table, digest)
_raise_for_status(response)
return response.stream(amt=chunk_size) | python | def blob_get(self, table, digest, chunk_size=1024 * 128):
"""
Returns a file-like object representing the contents of the blob
with the given digest.
"""
response = self._request('GET', _blob_path(table, digest), stream=True)
if response.status == 404:
raise DigestNotFoundException(table, digest)
_raise_for_status(response)
return response.stream(amt=chunk_size) | [
"def",
"blob_get",
"(",
"self",
",",
"table",
",",
"digest",
",",
"chunk_size",
"=",
"1024",
"*",
"128",
")",
":",
"response",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"_blob_path",
"(",
"table",
",",
"digest",
")",
",",
"stream",
"=",
"True",
")",
"if",
"response",
".",
"status",
"==",
"404",
":",
"raise",
"DigestNotFoundException",
"(",
"table",
",",
"digest",
")",
"_raise_for_status",
"(",
"response",
")",
"return",
"response",
".",
"stream",
"(",
"amt",
"=",
"chunk_size",
")"
] | Returns a file-like object representing the contents of the blob
with the given digest. | [
"Returns",
"a",
"file",
"like",
"object",
"representing",
"the",
"contents",
"of",
"the",
"blob",
"with",
"the",
"given",
"digest",
"."
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/http.py#L369-L378 | train |
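Continuing the sketch above, the generator returned by blob_get can be consumed chunk by chunk:

# Stream the blob back in 128 KiB chunks and verify it round-trips.
chunks = client.blob_get('myblobs', digest, chunk_size=1024 * 128)
content = b''.join(chunks)
assert hashlib.sha1(content).hexdigest() == digest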
crate/crate-python | src/crate/client/http.py | Client.blob_exists | def blob_exists(self, table, digest):
"""
Returns true if the blob with the given digest exists
under the given table.
"""
response = self._request('HEAD', _blob_path(table, digest))
if response.status == 200:
return True
elif response.status == 404:
return False
_raise_for_status(response) | python | def blob_exists(self, table, digest):
"""
Returns true if the blob with the given digest exists
under the given table.
"""
response = self._request('HEAD', _blob_path(table, digest))
if response.status == 200:
return True
elif response.status == 404:
return False
_raise_for_status(response) | [
"def",
"blob_exists",
"(",
"self",
",",
"table",
",",
"digest",
")",
":",
"response",
"=",
"self",
".",
"_request",
"(",
"'HEAD'",
",",
"_blob_path",
"(",
"table",
",",
"digest",
")",
")",
"if",
"response",
".",
"status",
"==",
"200",
":",
"return",
"True",
"elif",
"response",
".",
"status",
"==",
"404",
":",
"return",
"False",
"_raise_for_status",
"(",
"response",
")"
] | Returns true if the blob with the given digest exists
under the given table. | [
"Returns",
"true",
"if",
"the",
"blob",
"with",
"the",
"given",
"digest",
"exists",
"under",
"the",
"given",
"table",
"."
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/http.py#L380-L390 | train |
crate/crate-python | src/crate/client/http.py | Client._request | def _request(self, method, path, server=None, **kwargs):
"""Execute a request to the cluster
A server is selected from the server pool.
"""
while True:
next_server = server or self._get_server()
try:
response = self.server_pool[next_server].request(
method, path, username=self.username, password=self.password, schema=self.schema, **kwargs)
redirect_location = response.get_redirect_location()
if redirect_location and 300 <= response.status <= 308:
redirect_server = _server_url(redirect_location)
self._add_server(redirect_server)
return self._request(
method, path, server=redirect_server, **kwargs)
if not server and response.status in SRV_UNAVAILABLE_STATUSES:
with self._lock:
# drop server from active ones
self._drop_server(next_server, response.reason)
else:
return response
except (urllib3.exceptions.MaxRetryError,
urllib3.exceptions.ReadTimeoutError,
urllib3.exceptions.SSLError,
urllib3.exceptions.HTTPError,
urllib3.exceptions.ProxyError,) as ex:
ex_message = _ex_to_message(ex)
if server:
raise ConnectionError(
"Server not available, exception: %s" % ex_message
)
preserve_server = False
if isinstance(ex, urllib3.exceptions.ProtocolError):
preserve_server = any(
t in [type(arg) for arg in ex.args]
for t in PRESERVE_ACTIVE_SERVER_EXCEPTIONS
)
if (not preserve_server):
with self._lock:
# drop server from active ones
self._drop_server(next_server, ex_message)
except Exception as e:
raise ProgrammingError(_ex_to_message(e)) | python | def _request(self, method, path, server=None, **kwargs):
"""Execute a request to the cluster
A server is selected from the server pool.
"""
while True:
next_server = server or self._get_server()
try:
response = self.server_pool[next_server].request(
method, path, username=self.username, password=self.password, schema=self.schema, **kwargs)
redirect_location = response.get_redirect_location()
if redirect_location and 300 <= response.status <= 308:
redirect_server = _server_url(redirect_location)
self._add_server(redirect_server)
return self._request(
method, path, server=redirect_server, **kwargs)
if not server and response.status in SRV_UNAVAILABLE_STATUSES:
with self._lock:
# drop server from active ones
self._drop_server(next_server, response.reason)
else:
return response
except (urllib3.exceptions.MaxRetryError,
urllib3.exceptions.ReadTimeoutError,
urllib3.exceptions.SSLError,
urllib3.exceptions.HTTPError,
urllib3.exceptions.ProxyError,) as ex:
ex_message = _ex_to_message(ex)
if server:
raise ConnectionError(
"Server not available, exception: %s" % ex_message
)
preserve_server = False
if isinstance(ex, urllib3.exceptions.ProtocolError):
preserve_server = any(
t in [type(arg) for arg in ex.args]
for t in PRESERVE_ACTIVE_SERVER_EXCEPTIONS
)
if (not preserve_server):
with self._lock:
# drop server from active ones
self._drop_server(next_server, ex_message)
except Exception as e:
raise ProgrammingError(_ex_to_message(e)) | [
"def",
"_request",
"(",
"self",
",",
"method",
",",
"path",
",",
"server",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"while",
"True",
":",
"next_server",
"=",
"server",
"or",
"self",
".",
"_get_server",
"(",
")",
"try",
":",
"response",
"=",
"self",
".",
"server_pool",
"[",
"next_server",
"]",
".",
"request",
"(",
"method",
",",
"path",
",",
"username",
"=",
"self",
".",
"username",
",",
"password",
"=",
"self",
".",
"password",
",",
"schema",
"=",
"self",
".",
"schema",
",",
"*",
"*",
"kwargs",
")",
"redirect_location",
"=",
"response",
".",
"get_redirect_location",
"(",
")",
"if",
"redirect_location",
"and",
"300",
"<=",
"response",
".",
"status",
"<=",
"308",
":",
"redirect_server",
"=",
"_server_url",
"(",
"redirect_location",
")",
"self",
".",
"_add_server",
"(",
"redirect_server",
")",
"return",
"self",
".",
"_request",
"(",
"method",
",",
"path",
",",
"server",
"=",
"redirect_server",
",",
"*",
"*",
"kwargs",
")",
"if",
"not",
"server",
"and",
"response",
".",
"status",
"in",
"SRV_UNAVAILABLE_STATUSES",
":",
"with",
"self",
".",
"_lock",
":",
"# drop server from active ones",
"self",
".",
"_drop_server",
"(",
"next_server",
",",
"response",
".",
"reason",
")",
"else",
":",
"return",
"response",
"except",
"(",
"urllib3",
".",
"exceptions",
".",
"MaxRetryError",
",",
"urllib3",
".",
"exceptions",
".",
"ReadTimeoutError",
",",
"urllib3",
".",
"exceptions",
".",
"SSLError",
",",
"urllib3",
".",
"exceptions",
".",
"HTTPError",
",",
"urllib3",
".",
"exceptions",
".",
"ProxyError",
",",
")",
"as",
"ex",
":",
"ex_message",
"=",
"_ex_to_message",
"(",
"ex",
")",
"if",
"server",
":",
"raise",
"ConnectionError",
"(",
"\"Server not available, exception: %s\"",
"%",
"ex_message",
")",
"preserve_server",
"=",
"False",
"if",
"isinstance",
"(",
"ex",
",",
"urllib3",
".",
"exceptions",
".",
"ProtocolError",
")",
":",
"preserve_server",
"=",
"any",
"(",
"t",
"in",
"[",
"type",
"(",
"arg",
")",
"for",
"arg",
"in",
"ex",
".",
"args",
"]",
"for",
"t",
"in",
"PRESERVE_ACTIVE_SERVER_EXCEPTIONS",
")",
"if",
"(",
"not",
"preserve_server",
")",
":",
"with",
"self",
".",
"_lock",
":",
"# drop server from active ones",
"self",
".",
"_drop_server",
"(",
"next_server",
",",
"ex_message",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"ProgrammingError",
"(",
"_ex_to_message",
"(",
"e",
")",
")"
] | Execute a request to the cluster
A server is selected from the server pool. | [
"Execute",
"a",
"request",
"to",
"the",
"cluster"
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/http.py#L397-L440 | train |
crate/crate-python | src/crate/client/http.py | Client._json_request | def _json_request(self, method, path, data):
"""
Issue request against the crate HTTP API.
"""
response = self._request(method, path, data=data)
_raise_for_status(response)
if len(response.data) > 0:
return _json_from_response(response)
return response.data | python | def _json_request(self, method, path, data):
"""
Issue request against the crate HTTP API.
"""
response = self._request(method, path, data=data)
_raise_for_status(response)
if len(response.data) > 0:
return _json_from_response(response)
return response.data | [
"def",
"_json_request",
"(",
"self",
",",
"method",
",",
"path",
",",
"data",
")",
":",
"response",
"=",
"self",
".",
"_request",
"(",
"method",
",",
"path",
",",
"data",
"=",
"data",
")",
"_raise_for_status",
"(",
"response",
")",
"if",
"len",
"(",
"response",
".",
"data",
")",
">",
"0",
":",
"return",
"_json_from_response",
"(",
"response",
")",
"return",
"response",
".",
"data"
] | Issue request against the crate HTTP API. | [
"Issue",
"request",
"against",
"the",
"crate",
"HTTP",
"API",
"."
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/http.py#L442-L451 | train |
crate/crate-python | src/crate/client/http.py | Client._get_server | def _get_server(self):
"""
Get server to use for request.
Also processes the inactive server list, re-adding servers after the given retry interval.
"""
with self._lock:
inactive_server_count = len(self._inactive_servers)
for i in range(inactive_server_count):
try:
ts, server, message = heapq.heappop(self._inactive_servers)
except IndexError:
pass
else:
if (ts + self.retry_interval) > time():
# Not yet, put it back
heapq.heappush(self._inactive_servers,
(ts, server, message))
else:
self._active_servers.append(server)
logger.warn("Restored server %s into active pool",
server)
# if none is old enough, use oldest
if not self._active_servers:
ts, server, message = heapq.heappop(self._inactive_servers)
self._active_servers.append(server)
logger.info("Restored server %s into active pool", server)
server = self._active_servers[0]
self._roundrobin()
return server | python | def _get_server(self):
"""
Get server to use for request.
Also processes the inactive server list, re-adding servers after the given retry interval.
"""
with self._lock:
inactive_server_count = len(self._inactive_servers)
for i in range(inactive_server_count):
try:
ts, server, message = heapq.heappop(self._inactive_servers)
except IndexError:
pass
else:
if (ts + self.retry_interval) > time():
# Not yet, put it back
heapq.heappush(self._inactive_servers,
(ts, server, message))
else:
self._active_servers.append(server)
logger.warn("Restored server %s into active pool",
server)
# if none is old enough, use oldest
if not self._active_servers:
ts, server, message = heapq.heappop(self._inactive_servers)
self._active_servers.append(server)
logger.info("Restored server %s into active pool", server)
server = self._active_servers[0]
self._roundrobin()
return server | [
"def",
"_get_server",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock",
":",
"inactive_server_count",
"=",
"len",
"(",
"self",
".",
"_inactive_servers",
")",
"for",
"i",
"in",
"range",
"(",
"inactive_server_count",
")",
":",
"try",
":",
"ts",
",",
"server",
",",
"message",
"=",
"heapq",
".",
"heappop",
"(",
"self",
".",
"_inactive_servers",
")",
"except",
"IndexError",
":",
"pass",
"else",
":",
"if",
"(",
"ts",
"+",
"self",
".",
"retry_interval",
")",
">",
"time",
"(",
")",
":",
"# Not yet, put it back",
"heapq",
".",
"heappush",
"(",
"self",
".",
"_inactive_servers",
",",
"(",
"ts",
",",
"server",
",",
"message",
")",
")",
"else",
":",
"self",
".",
"_active_servers",
".",
"append",
"(",
"server",
")",
"logger",
".",
"warn",
"(",
"\"Restored server %s into active pool\"",
",",
"server",
")",
"# if none is old enough, use oldest",
"if",
"not",
"self",
".",
"_active_servers",
":",
"ts",
",",
"server",
",",
"message",
"=",
"heapq",
".",
"heappop",
"(",
"self",
".",
"_inactive_servers",
")",
"self",
".",
"_active_servers",
".",
"append",
"(",
"server",
")",
"logger",
".",
"info",
"(",
"\"Restored server %s into active pool\"",
",",
"server",
")",
"server",
"=",
"self",
".",
"_active_servers",
"[",
"0",
"]",
"self",
".",
"_roundrobin",
"(",
")",
"return",
"server"
] | Get server to use for request.
Also processes the inactive server list, re-adding servers after the given retry interval. | [
"Get",
"server",
"to",
"use",
"for",
"request",
".",
"Also",
"process",
"inactive",
"server",
"list",
"re",
"-",
"add",
"them",
"after",
"given",
"interval",
"."
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/http.py#L453-L484 | train |
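The interplay of the heap and the retry interval is easier to see in isolation. The following standalone sketch (names and interval are illustrative, not part of the client API) shows the same bookkeeping pattern used by _get_server and _drop_server:

import heapq
from time import time

RETRY_INTERVAL = 30  # seconds, illustrative

active = ['http://a:4200', 'http://b:4200']
inactive = []  # min-heap of (failed_at, server, reason) tuples

def park(server, reason):
    # mirrors _drop_server: move a failing server onto the heap
    active.remove(server)
    heapq.heappush(inactive, (time(), server, reason))

def restore_due():
    # mirrors _get_server: pop entries whose retry interval has elapsed
    while inactive and inactive[0][0] + RETRY_INTERVAL <= time():
        _, server, _ = heapq.heappop(inactive)
        active.append(server)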
crate/crate-python | src/crate/client/http.py | Client._drop_server | def _drop_server(self, server, message):
"""
Drop server from active list and adds it to the inactive ones.
"""
try:
self._active_servers.remove(server)
except ValueError:
pass
else:
heapq.heappush(self._inactive_servers, (time(), server, message))
logger.warning("Removed server %s from active pool", server)
# if this is the last server raise exception, otherwise try next
if not self._active_servers:
raise ConnectionError(
("No more Servers available, "
"exception from last server: %s") % message) | python | def _drop_server(self, server, message):
"""
Drop server from active list and adds it to the inactive ones.
"""
try:
self._active_servers.remove(server)
except ValueError:
pass
else:
heapq.heappush(self._inactive_servers, (time(), server, message))
logger.warning("Removed server %s from active pool", server)
# if this is the last server raise exception, otherwise try next
if not self._active_servers:
raise ConnectionError(
("No more Servers available, "
"exception from last server: %s") % message) | [
"def",
"_drop_server",
"(",
"self",
",",
"server",
",",
"message",
")",
":",
"try",
":",
"self",
".",
"_active_servers",
".",
"remove",
"(",
"server",
")",
"except",
"ValueError",
":",
"pass",
"else",
":",
"heapq",
".",
"heappush",
"(",
"self",
".",
"_inactive_servers",
",",
"(",
"time",
"(",
")",
",",
"server",
",",
"message",
")",
")",
"logger",
".",
"warning",
"(",
"\"Removed server %s from active pool\"",
",",
"server",
")",
"# if this is the last server raise exception, otherwise try next",
"if",
"not",
"self",
".",
"_active_servers",
":",
"raise",
"ConnectionError",
"(",
"(",
"\"No more Servers available, \"",
"\"exception from last server: %s\"",
")",
"%",
"message",
")"
] | Drop server from active list and adds it to the inactive ones. | [
"Drop",
"server",
"from",
"active",
"list",
"and",
"adds",
"it",
"to",
"the",
"inactive",
"ones",
"."
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/http.py#L492-L508 | train |
crate/crate-python | src/crate/client/sqlalchemy/predicates/__init__.py | match | def match(column, term, match_type=None, options=None):
"""Generates match predicate for fulltext search
:param column: A reference to a column or an index, or a subcolumn, or a
dictionary of subcolumns with boost values.
:param term: The term to match against. This string is analyzed and the
resulting tokens are compared to the index.
    :param match_type (optional): The match type. Determines how the term is
        applied and how the score is calculated.
    :param options (optional): The match options. Specifies match type behaviour.
(Not possible without a specified match type.) Match options must be
supplied as a dictionary.
"""
return Match(column, term, match_type, options) | python | def match(column, term, match_type=None, options=None):
"""Generates match predicate for fulltext search
:param column: A reference to a column or an index, or a subcolumn, or a
dictionary of subcolumns with boost values.
:param term: The term to match against. This string is analyzed and the
resulting tokens are compared to the index.
    :param match_type (optional): The match type. Determines how the term is
        applied and how the score is calculated.
    :param options (optional): The match options. Specifies match type behaviour.
(Not possible without a specified match type.) Match options must be
supplied as a dictionary.
"""
return Match(column, term, match_type, options) | [
"def",
"match",
"(",
"column",
",",
"term",
",",
"match_type",
"=",
"None",
",",
"options",
"=",
"None",
")",
":",
"return",
"Match",
"(",
"column",
",",
"term",
",",
"match_type",
",",
"options",
")"
] | Generates match predicate for fulltext search
:param column: A reference to a column or an index, or a subcolumn, or a
dictionary of subcolumns with boost values.
:param term: The term to match against. This string is analyzed and the
resulting tokens are compared to the index.
:param match_type (optional): The match type. Determines how the term is
applied and how the score is calculated.
:param options (optional): The match options. Specifies match type behaviour.
(Not possible without a specified match type.) Match options must be
supplied as a dictionary. | [
"Generates",
"match",
"predicate",
"for",
"fulltext",
"search"
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/sqlalchemy/predicates/__init__.py#L70-L86 | train |
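A usage sketch with the SQLAlchemy ORM; the session and the mapped Character class with fulltext-indexed columns are assumptions:

from crate.client.sqlalchemy.predicates import match

# Simple match against one fulltext-indexed column:
session.query(Character.name) \
    .filter(match(Character.name_ft, 'Trillian'))

# Multiple columns with boost values, plus a match type and options:
session.query(Character.name) \
    .filter(match({Character.name_ft: 1.5, Character.quote_ft: 0.9},
                  'Trillian', match_type='phrase',
                  options={'fuzziness': 3, 'analyzer': 'english'}))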
crate/crate-python | src/crate/client/blob.py | BlobContainer.put | def put(self, f, digest=None):
"""
Upload a blob
:param f:
File object to be uploaded (required to support seek if digest is
not provided).
:param digest:
Optional SHA-1 hex digest of the file contents. Gets computed
before actual upload if not provided, which requires an extra file
read.
:return:
The hex digest of the uploaded blob if not provided in the call.
Otherwise a boolean indicating if the blob has been newly created.
"""
if digest:
actual_digest = digest
else:
actual_digest = self._compute_digest(f)
created = self.conn.client.blob_put(self.container_name,
actual_digest, f)
if digest:
return created
return actual_digest | python | def put(self, f, digest=None):
"""
Upload a blob
:param f:
File object to be uploaded (required to support seek if digest is
not provided).
:param digest:
Optional SHA-1 hex digest of the file contents. Gets computed
before actual upload if not provided, which requires an extra file
read.
:return:
The hex digest of the uploaded blob if not provided in the call.
Otherwise a boolean indicating if the blob has been newly created.
"""
if digest:
actual_digest = digest
else:
actual_digest = self._compute_digest(f)
created = self.conn.client.blob_put(self.container_name,
actual_digest, f)
if digest:
return created
return actual_digest | [
"def",
"put",
"(",
"self",
",",
"f",
",",
"digest",
"=",
"None",
")",
":",
"if",
"digest",
":",
"actual_digest",
"=",
"digest",
"else",
":",
"actual_digest",
"=",
"self",
".",
"_compute_digest",
"(",
"f",
")",
"created",
"=",
"self",
".",
"conn",
".",
"client",
".",
"blob_put",
"(",
"self",
".",
"container_name",
",",
"actual_digest",
",",
"f",
")",
"if",
"digest",
":",
"return",
"created",
"return",
"actual_digest"
] | Upload a blob
:param f:
File object to be uploaded (required to support seek if digest is
not provided).
:param digest:
Optional SHA-1 hex digest of the file contents. Gets computed
before actual upload if not provided, which requires an extra file
read.
:return:
The hex digest of the uploaded blob if not provided in the call.
Otherwise a boolean indicating if the blob has been newly created. | [
"Upload",
"a",
"blob"
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/blob.py#L46-L71 | train |
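At the connection level the container is obtained via get_blob_container; a sketch assuming a local node and an existing blob table named myblobs:

from io import BytesIO
from crate import client

conn = client.connect('http://localhost:4200')  # assumed address
container = conn.get_blob_container('myblobs')

# The digest is computed for us when it is not supplied.
digest = container.put(BytesIO(b'binary content'))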
crate/crate-python | src/crate/client/blob.py | BlobContainer.get | def get(self, digest, chunk_size=1024 * 128):
"""
Return the contents of a blob
:param digest: the hex digest of the blob to return
:param chunk_size: the size of the chunks returned on each iteration
:return: generator returning chunks of data
"""
return self.conn.client.blob_get(self.container_name, digest,
chunk_size) | python | def get(self, digest, chunk_size=1024 * 128):
"""
Return the contents of a blob
:param digest: the hex digest of the blob to return
:param chunk_size: the size of the chunks returned on each iteration
:return: generator returning chunks of data
"""
return self.conn.client.blob_get(self.container_name, digest,
chunk_size) | [
"def",
"get",
"(",
"self",
",",
"digest",
",",
"chunk_size",
"=",
"1024",
"*",
"128",
")",
":",
"return",
"self",
".",
"conn",
".",
"client",
".",
"blob_get",
"(",
"self",
".",
"container_name",
",",
"digest",
",",
"chunk_size",
")"
] | Return the contents of a blob
:param digest: the hex digest of the blob to return
:param chunk_size: the size of the chunks returned on each iteration
:return: generator returning chunks of data | [
"Return",
"the",
"contents",
"of",
"a",
"blob"
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/blob.py#L73-L82 | train |
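Continuing that sketch, the digest returned by put() can be streamed back and written out (the target path is illustrative):

with open('/tmp/blob.bin', 'wb') as out:
    for chunk in container.get(digest):
        out.write(chunk)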
crate/crate-python | src/crate/client/blob.py | BlobContainer.delete | def delete(self, digest):
"""
Delete a blob
:param digest: the hex digest of the blob to be deleted
:return: True if blob existed
"""
return self.conn.client.blob_del(self.container_name, digest) | python | def delete(self, digest):
"""
Delete a blob
:param digest: the hex digest of the blob to be deleted
:return: True if blob existed
"""
return self.conn.client.blob_del(self.container_name, digest) | [
"def",
"delete",
"(",
"self",
",",
"digest",
")",
":",
"return",
"self",
".",
"conn",
".",
"client",
".",
"blob_del",
"(",
"self",
".",
"container_name",
",",
"digest",
")"
] | Delete a blob
:param digest: the hex digest of the blob to be deleted
:return: True if blob existed | [
"Delete",
"a",
"blob"
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/blob.py#L84-L91 | train |
crate/crate-python | src/crate/client/blob.py | BlobContainer.exists | def exists(self, digest):
"""
Check if a blob exists
:param digest: Hex digest of the blob
:return: Boolean indicating existence of the blob
"""
return self.conn.client.blob_exists(self.container_name, digest) | python | def exists(self, digest):
"""
Check if a blob exists
:param digest: Hex digest of the blob
:return: Boolean indicating existence of the blob
"""
return self.conn.client.blob_exists(self.container_name, digest) | [
"def",
"exists",
"(",
"self",
",",
"digest",
")",
":",
"return",
"self",
".",
"conn",
".",
"client",
".",
"blob_exists",
"(",
"self",
".",
"container_name",
",",
"digest",
")"
] | Check if a blob exists
:param digest: Hex digest of the blob
:return: Boolean indicating existence of the blob | [
"Check",
"if",
"a",
"blob",
"exists"
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/blob.py#L93-L100 | train |
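exists() and delete() round out the container API; continuing the same sketch:

if container.exists(digest):
    container.delete(digest)   # returns True when the blob existed
assert not container.exists(digest)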
crate/crate-python | src/crate/client/cursor.py | Cursor.next | def next(self):
"""
        Return the next row of a query result set, respecting whether the cursor was
closed.
"""
if self.rows is None:
raise ProgrammingError(
"No result available. " +
"execute() or executemany() must be called first."
)
elif not self._closed:
return next(self.rows)
else:
raise ProgrammingError("Cursor closed") | python | def next(self):
"""
        Return the next row of a query result set, respecting whether the cursor was
closed.
"""
if self.rows is None:
raise ProgrammingError(
"No result available. " +
"execute() or executemany() must be called first."
)
elif not self._closed:
return next(self.rows)
else:
raise ProgrammingError("Cursor closed") | [
"def",
"next",
"(",
"self",
")",
":",
"if",
"self",
".",
"rows",
"is",
"None",
":",
"raise",
"ProgrammingError",
"(",
"\"No result available. \"",
"+",
"\"execute() or executemany() must be called first.\"",
")",
"elif",
"not",
"self",
".",
"_closed",
":",
"return",
"next",
"(",
"self",
".",
"rows",
")",
"else",
":",
"raise",
"ProgrammingError",
"(",
"\"Cursor closed\"",
")"
] | Return the next row of a query result set, respecting whether the cursor was
closed. | [
"Return",
"the",
"next",
"row",
"of",
"a",
"query",
"result",
"set",
"respecting",
"if",
"cursor",
"was",
"closed",
"."
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/cursor.py#L175-L188 | train |
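fetchone() funnels through next() and maps StopIteration to None, per the DB API; a sketch assuming an open connection conn (e.g. from crate.client.connect) and an existing locations table:

cursor = conn.cursor()
cursor.execute('SELECT name FROM locations ORDER BY name')

row = cursor.fetchone()      # delegates to next() under the hood
while row is not None:
    print(row)
    row = cursor.fetchone()  # None once the result set is exhausted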
crate/crate-python | src/crate/client/cursor.py | Cursor.duration | def duration(self):
"""
This read-only attribute specifies the server-side duration of a query
in milliseconds.
"""
if self._closed or \
not self._result or \
"duration" not in self._result:
return -1
return self._result.get("duration", 0) | python | def duration(self):
"""
This read-only attribute specifies the server-side duration of a query
in milliseconds.
"""
if self._closed or \
not self._result or \
"duration" not in self._result:
return -1
return self._result.get("duration", 0) | [
"def",
"duration",
"(",
"self",
")",
":",
"if",
"self",
".",
"_closed",
"or",
"not",
"self",
".",
"_result",
"or",
"\"duration\"",
"not",
"in",
"self",
".",
"_result",
":",
"return",
"-",
"1",
"return",
"self",
".",
"_result",
".",
"get",
"(",
"\"duration\"",
",",
"0",
")"
] | This read-only attribute specifies the server-side duration of a query
in milliseconds. | [
"This",
"read",
"-",
"only",
"attribute",
"specifies",
"the",
"server",
"-",
"side",
"duration",
"of",
"a",
"query",
"in",
"milliseconds",
"."
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/cursor.py#L212-L221 | train |
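The attribute (a property in the full source) can be read after any execute(); reusing the cursor from the previous sketch:

cursor.execute('SELECT 1')
print('server-side duration: %s ms' % cursor.duration)  # -1 if unavailable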
crate/crate-python | src/crate/client/sqlalchemy/compiler.py | rewrite_update | def rewrite_update(clauseelement, multiparams, params):
""" change the params to enable partial updates
sqlalchemy by default only supports updates of complex types in the form of
"col = ?", ({"x": 1, "y": 2}
but crate supports
"col['x'] = ?, col['y'] = ?", (1, 2)
by using the `Craty` (`MutableDict`) type.
The update statement is only rewritten if an item of the MutableDict was
changed.
"""
newmultiparams = []
_multiparams = multiparams[0]
if len(_multiparams) == 0:
return clauseelement, multiparams, params
for _params in _multiparams:
newparams = {}
for key, val in _params.items():
if (
not isinstance(val, MutableDict) or
(not any(val._changed_keys) and not any(val._deleted_keys))
):
newparams[key] = val
continue
for subkey, subval in val.items():
if subkey in val._changed_keys:
newparams["{0}['{1}']".format(key, subkey)] = subval
for subkey in val._deleted_keys:
newparams["{0}['{1}']".format(key, subkey)] = None
newmultiparams.append(newparams)
_multiparams = (newmultiparams, )
clause = clauseelement.values(newmultiparams[0])
clause._crate_specific = True
return clause, _multiparams, params | python | def rewrite_update(clauseelement, multiparams, params):
""" change the params to enable partial updates
sqlalchemy by default only supports updates of complex types in the form of
"col = ?", ({"x": 1, "y": 2}
but crate supports
"col['x'] = ?, col['y'] = ?", (1, 2)
by using the `Craty` (`MutableDict`) type.
The update statement is only rewritten if an item of the MutableDict was
changed.
"""
newmultiparams = []
_multiparams = multiparams[0]
if len(_multiparams) == 0:
return clauseelement, multiparams, params
for _params in _multiparams:
newparams = {}
for key, val in _params.items():
if (
not isinstance(val, MutableDict) or
(not any(val._changed_keys) and not any(val._deleted_keys))
):
newparams[key] = val
continue
for subkey, subval in val.items():
if subkey in val._changed_keys:
newparams["{0}['{1}']".format(key, subkey)] = subval
for subkey in val._deleted_keys:
newparams["{0}['{1}']".format(key, subkey)] = None
newmultiparams.append(newparams)
_multiparams = (newmultiparams, )
clause = clauseelement.values(newmultiparams[0])
clause._crate_specific = True
return clause, _multiparams, params | [
"def",
"rewrite_update",
"(",
"clauseelement",
",",
"multiparams",
",",
"params",
")",
":",
"newmultiparams",
"=",
"[",
"]",
"_multiparams",
"=",
"multiparams",
"[",
"0",
"]",
"if",
"len",
"(",
"_multiparams",
")",
"==",
"0",
":",
"return",
"clauseelement",
",",
"multiparams",
",",
"params",
"for",
"_params",
"in",
"_multiparams",
":",
"newparams",
"=",
"{",
"}",
"for",
"key",
",",
"val",
"in",
"_params",
".",
"items",
"(",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"val",
",",
"MutableDict",
")",
"or",
"(",
"not",
"any",
"(",
"val",
".",
"_changed_keys",
")",
"and",
"not",
"any",
"(",
"val",
".",
"_deleted_keys",
")",
")",
")",
":",
"newparams",
"[",
"key",
"]",
"=",
"val",
"continue",
"for",
"subkey",
",",
"subval",
"in",
"val",
".",
"items",
"(",
")",
":",
"if",
"subkey",
"in",
"val",
".",
"_changed_keys",
":",
"newparams",
"[",
"\"{0}['{1}']\"",
".",
"format",
"(",
"key",
",",
"subkey",
")",
"]",
"=",
"subval",
"for",
"subkey",
"in",
"val",
".",
"_deleted_keys",
":",
"newparams",
"[",
"\"{0}['{1}']\"",
".",
"format",
"(",
"key",
",",
"subkey",
")",
"]",
"=",
"None",
"newmultiparams",
".",
"append",
"(",
"newparams",
")",
"_multiparams",
"=",
"(",
"newmultiparams",
",",
")",
"clause",
"=",
"clauseelement",
".",
"values",
"(",
"newmultiparams",
"[",
"0",
"]",
")",
"clause",
".",
"_crate_specific",
"=",
"True",
"return",
"clause",
",",
"_multiparams",
",",
"params"
] | change the params to enable partial updates
sqlalchemy by default only supports updates of complex types in the form of
"col = ?", ({"x": 1, "y": 2}
but crate supports
"col['x'] = ?, col['y'] = ?", (1, 2)
by using the `Craty` (`MutableDict`) type.
The update statement is only rewritten if an item of the MutableDict was
changed. | [
"change",
"the",
"params",
"to",
"enable",
"partial",
"updates"
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/sqlalchemy/compiler.py#L32-L70 | train |
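The effect is easiest to see from the ORM side; a sketch with an illustrative model using the Craty type (table and column names are assumptions):

import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from crate.client.sqlalchemy.types import Craty

Base = declarative_base()

class Character(Base):
    __tablename__ = 'characters'
    name = sa.Column(sa.String, primary_key=True)
    details = sa.Column(Craty)

# After loading an instance and changing a single key ...
#     char = session.query(Character).first()
#     char.details['age'] = 42
#     session.commit()
# ... the MutableDict records 'age' as changed, so the UPDATE is emitted
# as "UPDATE characters SET details['age'] = ?" instead of rewriting the
# whole object column.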
crate/crate-python | src/crate/client/sqlalchemy/compiler.py | CrateCompiler._get_crud_params | def _get_crud_params(compiler, stmt, **kw):
""" extract values from crud parameters
taken from SQLAlchemy's crud module (since 1.0.x) and
adapted for Crate dialect"""
compiler.postfetch = []
compiler.insert_prefetch = []
compiler.update_prefetch = []
compiler.returning = []
# no parameters in the statement, no parameters in the
# compiled params - return binds for all columns
if compiler.column_keys is None and stmt.parameters is None:
return [(c, crud._create_bind_param(compiler, c, None,
required=True))
for c in stmt.table.columns]
if stmt._has_multi_parameters:
stmt_parameters = stmt.parameters[0]
else:
stmt_parameters = stmt.parameters
# getters - these are normally just column.key,
# but in the case of mysql multi-table update, the rules for
# .key must conditionally take tablename into account
if SA_VERSION >= SA_1_1:
_column_as_key, _getattr_col_key, _col_bind_name = \
crud._key_getters_for_crud_column(compiler, stmt)
else:
_column_as_key, _getattr_col_key, _col_bind_name = \
crud._key_getters_for_crud_column(compiler)
# if we have statement parameters - set defaults in the
# compiled params
if compiler.column_keys is None:
parameters = {}
else:
parameters = dict((_column_as_key(key), crud.REQUIRED)
for key in compiler.column_keys
if not stmt_parameters or
key not in stmt_parameters)
# create a list of column assignment clauses as tuples
values = []
if stmt_parameters is not None:
crud._get_stmt_parameters_params(
compiler,
parameters, stmt_parameters, _column_as_key, values, kw)
check_columns = {}
crud._scan_cols(compiler, stmt, parameters,
_getattr_col_key, _column_as_key,
_col_bind_name, check_columns, values, kw)
if stmt._has_multi_parameters:
values = crud._extend_values_for_multiparams(compiler, stmt,
values, kw)
return values | python | def _get_crud_params(compiler, stmt, **kw):
""" extract values from crud parameters
taken from SQLAlchemy's crud module (since 1.0.x) and
adapted for Crate dialect"""
compiler.postfetch = []
compiler.insert_prefetch = []
compiler.update_prefetch = []
compiler.returning = []
# no parameters in the statement, no parameters in the
# compiled params - return binds for all columns
if compiler.column_keys is None and stmt.parameters is None:
return [(c, crud._create_bind_param(compiler, c, None,
required=True))
for c in stmt.table.columns]
if stmt._has_multi_parameters:
stmt_parameters = stmt.parameters[0]
else:
stmt_parameters = stmt.parameters
# getters - these are normally just column.key,
# but in the case of mysql multi-table update, the rules for
# .key must conditionally take tablename into account
if SA_VERSION >= SA_1_1:
_column_as_key, _getattr_col_key, _col_bind_name = \
crud._key_getters_for_crud_column(compiler, stmt)
else:
_column_as_key, _getattr_col_key, _col_bind_name = \
crud._key_getters_for_crud_column(compiler)
# if we have statement parameters - set defaults in the
# compiled params
if compiler.column_keys is None:
parameters = {}
else:
parameters = dict((_column_as_key(key), crud.REQUIRED)
for key in compiler.column_keys
if not stmt_parameters or
key not in stmt_parameters)
# create a list of column assignment clauses as tuples
values = []
if stmt_parameters is not None:
crud._get_stmt_parameters_params(
compiler,
parameters, stmt_parameters, _column_as_key, values, kw)
check_columns = {}
crud._scan_cols(compiler, stmt, parameters,
_getattr_col_key, _column_as_key,
_col_bind_name, check_columns, values, kw)
if stmt._has_multi_parameters:
values = crud._extend_values_for_multiparams(compiler, stmt,
values, kw)
return values | [
"def",
"_get_crud_params",
"(",
"compiler",
",",
"stmt",
",",
"*",
"*",
"kw",
")",
":",
"compiler",
".",
"postfetch",
"=",
"[",
"]",
"compiler",
".",
"insert_prefetch",
"=",
"[",
"]",
"compiler",
".",
"update_prefetch",
"=",
"[",
"]",
"compiler",
".",
"returning",
"=",
"[",
"]",
"# no parameters in the statement, no parameters in the",
"# compiled params - return binds for all columns",
"if",
"compiler",
".",
"column_keys",
"is",
"None",
"and",
"stmt",
".",
"parameters",
"is",
"None",
":",
"return",
"[",
"(",
"c",
",",
"crud",
".",
"_create_bind_param",
"(",
"compiler",
",",
"c",
",",
"None",
",",
"required",
"=",
"True",
")",
")",
"for",
"c",
"in",
"stmt",
".",
"table",
".",
"columns",
"]",
"if",
"stmt",
".",
"_has_multi_parameters",
":",
"stmt_parameters",
"=",
"stmt",
".",
"parameters",
"[",
"0",
"]",
"else",
":",
"stmt_parameters",
"=",
"stmt",
".",
"parameters",
"# getters - these are normally just column.key,",
"# but in the case of mysql multi-table update, the rules for",
"# .key must conditionally take tablename into account",
"if",
"SA_VERSION",
">=",
"SA_1_1",
":",
"_column_as_key",
",",
"_getattr_col_key",
",",
"_col_bind_name",
"=",
"crud",
".",
"_key_getters_for_crud_column",
"(",
"compiler",
",",
"stmt",
")",
"else",
":",
"_column_as_key",
",",
"_getattr_col_key",
",",
"_col_bind_name",
"=",
"crud",
".",
"_key_getters_for_crud_column",
"(",
"compiler",
")",
"# if we have statement parameters - set defaults in the",
"# compiled params",
"if",
"compiler",
".",
"column_keys",
"is",
"None",
":",
"parameters",
"=",
"{",
"}",
"else",
":",
"parameters",
"=",
"dict",
"(",
"(",
"_column_as_key",
"(",
"key",
")",
",",
"crud",
".",
"REQUIRED",
")",
"for",
"key",
"in",
"compiler",
".",
"column_keys",
"if",
"not",
"stmt_parameters",
"or",
"key",
"not",
"in",
"stmt_parameters",
")",
"# create a list of column assignment clauses as tuples",
"values",
"=",
"[",
"]",
"if",
"stmt_parameters",
"is",
"not",
"None",
":",
"crud",
".",
"_get_stmt_parameters_params",
"(",
"compiler",
",",
"parameters",
",",
"stmt_parameters",
",",
"_column_as_key",
",",
"values",
",",
"kw",
")",
"check_columns",
"=",
"{",
"}",
"crud",
".",
"_scan_cols",
"(",
"compiler",
",",
"stmt",
",",
"parameters",
",",
"_getattr_col_key",
",",
"_column_as_key",
",",
"_col_bind_name",
",",
"check_columns",
",",
"values",
",",
"kw",
")",
"if",
"stmt",
".",
"_has_multi_parameters",
":",
"values",
"=",
"crud",
".",
"_extend_values_for_multiparams",
"(",
"compiler",
",",
"stmt",
",",
"values",
",",
"kw",
")",
"return",
"values"
] | extract values from crud parameters
taken from SQLAlchemy's crud module (since 1.0.x) and
adapted for Crate dialect | [
"extract",
"values",
"from",
"crud",
"parameters"
] | 68e39c95f5bbe88b74bbfa26de4347fc644636a8 | https://github.com/crate/crate-python/blob/68e39c95f5bbe88b74bbfa26de4347fc644636a8/src/crate/client/sqlalchemy/compiler.py#L362-L423 | train |
kstateome/django-cas | cas/models.py | get_tgt_for | def get_tgt_for(user):
"""
Fetch a ticket granting ticket for a given user.
:param user: UserObj
    :return: TGT or Exception
"""
if not settings.CAS_PROXY_CALLBACK:
raise CasConfigException("No proxy callback set in settings")
try:
return Tgt.objects.get(username=user.username)
except ObjectDoesNotExist:
logger.warning('No ticket found for user {user}'.format(
user=user.username
))
raise CasTicketException("no ticket found for user " + user.username) | python | def get_tgt_for(user):
"""
Fetch a ticket granting ticket for a given user.
:param user: UserObj
    :return: TGT or Exception
"""
if not settings.CAS_PROXY_CALLBACK:
raise CasConfigException("No proxy callback set in settings")
try:
return Tgt.objects.get(username=user.username)
except ObjectDoesNotExist:
logger.warning('No ticket found for user {user}'.format(
user=user.username
))
raise CasTicketException("no ticket found for user " + user.username) | [
"def",
"get_tgt_for",
"(",
"user",
")",
":",
"if",
"not",
"settings",
".",
"CAS_PROXY_CALLBACK",
":",
"raise",
"CasConfigException",
"(",
"\"No proxy callback set in settings\"",
")",
"try",
":",
"return",
"Tgt",
".",
"objects",
".",
"get",
"(",
"username",
"=",
"user",
".",
"username",
")",
"except",
"ObjectDoesNotExist",
":",
"logger",
".",
"warning",
"(",
"'No ticket found for user {user}'",
".",
"format",
"(",
"user",
"=",
"user",
".",
"username",
")",
")",
"raise",
"CasTicketException",
"(",
"\"no ticket found for user \"",
"+",
"user",
".",
"username",
")"
] | Fetch a ticket granting ticket for a given user.
:param user: UserObj
:return: TGT or Exception | [
"Fetch",
"a",
"ticket",
"granting",
"ticket",
"for",
"a",
"given",
"user",
"."
] | 8a871093966f001b4dadf7d097ac326169f3c066 | https://github.com/kstateome/django-cas/blob/8a871093966f001b4dadf7d097ac326169f3c066/cas/models.py#L77-L94 | train |
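A sketch of how a Django view might use this together with get_proxy_ticket_for; the backend URL is an assumption:

from cas.models import get_tgt_for

def call_backend(request):
    tgt = get_tgt_for(request.user)
    ticket = tgt.get_proxy_ticket_for('https://backend.example.edu/api/')
    return ticket  # pass the proxy ticket along to the proxied service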
kstateome/django-cas | cas/models.py | Tgt.get_proxy_ticket_for | def get_proxy_ticket_for(self, service):
"""
        Requests a CAS 2.0+ proxy ticket for the given target service.
        :param: service
        Returns the proxy ticket string on success and raises CasTicketException on failure.
"""
if not settings.CAS_PROXY_CALLBACK:
raise CasConfigException("No proxy callback set in settings")
params = {'pgt': self.tgt, 'targetService': service}
url = (urljoin(settings.CAS_SERVER_URL, 'proxy') + '?' +
urlencode(params))
page = urlopen(url)
try:
response = page.read()
tree = ElementTree.fromstring(response)
if tree[0].tag.endswith('proxySuccess'):
return tree[0][0].text
else:
logger.warning('Failed to get proxy ticket')
raise CasTicketException('Failed to get proxy ticket: %s' % \
tree[0].text.strip())
finally:
page.close() | python | def get_proxy_ticket_for(self, service):
"""
        Requests a CAS 2.0+ proxy ticket for the given target service.
        :param: service
        Returns the proxy ticket string on success and raises CasTicketException on failure.
"""
if not settings.CAS_PROXY_CALLBACK:
raise CasConfigException("No proxy callback set in settings")
params = {'pgt': self.tgt, 'targetService': service}
url = (urljoin(settings.CAS_SERVER_URL, 'proxy') + '?' +
urlencode(params))
page = urlopen(url)
try:
response = page.read()
tree = ElementTree.fromstring(response)
if tree[0].tag.endswith('proxySuccess'):
return tree[0][0].text
else:
logger.warning('Failed to get proxy ticket')
raise CasTicketException('Failed to get proxy ticket: %s' % \
tree[0].text.strip())
finally:
page.close() | [
"def",
"get_proxy_ticket_for",
"(",
"self",
",",
"service",
")",
":",
"if",
"not",
"settings",
".",
"CAS_PROXY_CALLBACK",
":",
"raise",
"CasConfigException",
"(",
"\"No proxy callback set in settings\"",
")",
"params",
"=",
"{",
"'pgt'",
":",
"self",
".",
"tgt",
",",
"'targetService'",
":",
"service",
"}",
"url",
"=",
"(",
"urljoin",
"(",
"settings",
".",
"CAS_SERVER_URL",
",",
"'proxy'",
")",
"+",
"'?'",
"+",
"urlencode",
"(",
"params",
")",
")",
"page",
"=",
"urlopen",
"(",
"url",
")",
"try",
":",
"response",
"=",
"page",
".",
"read",
"(",
")",
"tree",
"=",
"ElementTree",
".",
"fromstring",
"(",
"response",
")",
"if",
"tree",
"[",
"0",
"]",
".",
"tag",
".",
"endswith",
"(",
"'proxySuccess'",
")",
":",
"return",
"tree",
"[",
"0",
"]",
"[",
"0",
"]",
".",
"text",
"else",
":",
"logger",
".",
"warning",
"(",
"'Failed to get proxy ticket'",
")",
"raise",
"CasTicketException",
"(",
"'Failed to get proxy ticket: %s'",
"%",
"tree",
"[",
"0",
"]",
".",
"text",
".",
"strip",
"(",
")",
")",
"finally",
":",
"page",
".",
"close",
"(",
")"
] | Requests a CAS 2.0+ proxy ticket for the given target service.
:param: service
Returns the proxy ticket string on success and raises CasTicketException on failure. | [
"Verifies",
"CAS",
"2",
".",
"0",
"+",
"XML",
"-",
"based",
"authentication",
"ticket",
"."
] | 8a871093966f001b4dadf7d097ac326169f3c066 | https://github.com/kstateome/django-cas/blob/8a871093966f001b4dadf7d097ac326169f3c066/cas/models.py#L36-L65 | train |
kstateome/django-cas | cas/backends.py | _internal_verify_cas | def _internal_verify_cas(ticket, service, suffix):
"""Verifies CAS 2.0 and 3.0 XML-based authentication ticket.
Returns username on success and None on failure.
"""
params = {'ticket': ticket, 'service': service}
if settings.CAS_PROXY_CALLBACK:
params['pgtUrl'] = settings.CAS_PROXY_CALLBACK
url = (urljoin(settings.CAS_SERVER_URL, suffix) + '?' +
urlencode(params))
page = urlopen(url)
username = None
try:
response = page.read()
tree = ElementTree.fromstring(response)
document = minidom.parseString(response)
if tree[0].tag.endswith('authenticationSuccess'):
if settings.CAS_RESPONSE_CALLBACKS:
cas_response_callbacks(tree)
username = tree[0][0].text
pgt_el = document.getElementsByTagName('cas:proxyGrantingTicket')
if pgt_el:
pgt = pgt_el[0].firstChild.nodeValue
try:
pgtIou = _get_pgtiou(pgt)
tgt = Tgt.objects.get(username=username)
tgt.tgt = pgtIou.tgt
tgt.save()
pgtIou.delete()
except Tgt.DoesNotExist:
Tgt.objects.create(username=username, tgt=pgtIou.tgt)
logger.info('Creating TGT ticket for {user}'.format(
user=username
))
pgtIou.delete()
except Exception as e:
logger.warning('Failed to do proxy authentication. {message}'.format(
message=e
))
else:
failure = document.getElementsByTagName('cas:authenticationFailure')
if failure:
logger.warn('Authentication failed from CAS server: %s',
failure[0].firstChild.nodeValue)
except Exception as e:
logger.error('Failed to verify CAS authentication: {message}'.format(
message=e
))
finally:
page.close()
return username | python | def _internal_verify_cas(ticket, service, suffix):
"""Verifies CAS 2.0 and 3.0 XML-based authentication ticket.
Returns username on success and None on failure.
"""
params = {'ticket': ticket, 'service': service}
if settings.CAS_PROXY_CALLBACK:
params['pgtUrl'] = settings.CAS_PROXY_CALLBACK
url = (urljoin(settings.CAS_SERVER_URL, suffix) + '?' +
urlencode(params))
page = urlopen(url)
username = None
try:
response = page.read()
tree = ElementTree.fromstring(response)
document = minidom.parseString(response)
if tree[0].tag.endswith('authenticationSuccess'):
if settings.CAS_RESPONSE_CALLBACKS:
cas_response_callbacks(tree)
username = tree[0][0].text
pgt_el = document.getElementsByTagName('cas:proxyGrantingTicket')
if pgt_el:
pgt = pgt_el[0].firstChild.nodeValue
try:
pgtIou = _get_pgtiou(pgt)
tgt = Tgt.objects.get(username=username)
tgt.tgt = pgtIou.tgt
tgt.save()
pgtIou.delete()
except Tgt.DoesNotExist:
Tgt.objects.create(username=username, tgt=pgtIou.tgt)
logger.info('Creating TGT ticket for {user}'.format(
user=username
))
pgtIou.delete()
except Exception as e:
logger.warning('Failed to do proxy authentication. {message}'.format(
message=e
))
else:
failure = document.getElementsByTagName('cas:authenticationFailure')
if failure:
logger.warn('Authentication failed from CAS server: %s',
failure[0].firstChild.nodeValue)
except Exception as e:
logger.error('Failed to verify CAS authentication: {message}'.format(
message=e
))
finally:
page.close()
return username | [
"def",
"_internal_verify_cas",
"(",
"ticket",
",",
"service",
",",
"suffix",
")",
":",
"params",
"=",
"{",
"'ticket'",
":",
"ticket",
",",
"'service'",
":",
"service",
"}",
"if",
"settings",
".",
"CAS_PROXY_CALLBACK",
":",
"params",
"[",
"'pgtUrl'",
"]",
"=",
"settings",
".",
"CAS_PROXY_CALLBACK",
"url",
"=",
"(",
"urljoin",
"(",
"settings",
".",
"CAS_SERVER_URL",
",",
"suffix",
")",
"+",
"'?'",
"+",
"urlencode",
"(",
"params",
")",
")",
"page",
"=",
"urlopen",
"(",
"url",
")",
"username",
"=",
"None",
"try",
":",
"response",
"=",
"page",
".",
"read",
"(",
")",
"tree",
"=",
"ElementTree",
".",
"fromstring",
"(",
"response",
")",
"document",
"=",
"minidom",
".",
"parseString",
"(",
"response",
")",
"if",
"tree",
"[",
"0",
"]",
".",
"tag",
".",
"endswith",
"(",
"'authenticationSuccess'",
")",
":",
"if",
"settings",
".",
"CAS_RESPONSE_CALLBACKS",
":",
"cas_response_callbacks",
"(",
"tree",
")",
"username",
"=",
"tree",
"[",
"0",
"]",
"[",
"0",
"]",
".",
"text",
"pgt_el",
"=",
"document",
".",
"getElementsByTagName",
"(",
"'cas:proxyGrantingTicket'",
")",
"if",
"pgt_el",
":",
"pgt",
"=",
"pgt_el",
"[",
"0",
"]",
".",
"firstChild",
".",
"nodeValue",
"try",
":",
"pgtIou",
"=",
"_get_pgtiou",
"(",
"pgt",
")",
"tgt",
"=",
"Tgt",
".",
"objects",
".",
"get",
"(",
"username",
"=",
"username",
")",
"tgt",
".",
"tgt",
"=",
"pgtIou",
".",
"tgt",
"tgt",
".",
"save",
"(",
")",
"pgtIou",
".",
"delete",
"(",
")",
"except",
"Tgt",
".",
"DoesNotExist",
":",
"Tgt",
".",
"objects",
".",
"create",
"(",
"username",
"=",
"username",
",",
"tgt",
"=",
"pgtIou",
".",
"tgt",
")",
"logger",
".",
"info",
"(",
"'Creating TGT ticket for {user}'",
".",
"format",
"(",
"user",
"=",
"username",
")",
")",
"pgtIou",
".",
"delete",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"logger",
".",
"warning",
"(",
"'Failed to do proxy authentication. {message}'",
".",
"format",
"(",
"message",
"=",
"e",
")",
")",
"else",
":",
"failure",
"=",
"document",
".",
"getElementsByTagName",
"(",
"'cas:authenticationFailure'",
")",
"if",
"failure",
":",
"logger",
".",
"warn",
"(",
"'Authentication failed from CAS server: %s'",
",",
"failure",
"[",
"0",
"]",
".",
"firstChild",
".",
"nodeValue",
")",
"except",
"Exception",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"'Failed to verify CAS authentication: {message}'",
".",
"format",
"(",
"message",
"=",
"e",
")",
")",
"finally",
":",
"page",
".",
"close",
"(",
")",
"return",
"username"
] | Verifies CAS 2.0 and 3.0 XML-based authentication ticket.
Returns username on success and None on failure. | [
"Verifies",
"CAS",
"2",
".",
"0",
"and",
"3",
".",
"0",
"XML",
"-",
"based",
"authentication",
"ticket",
"."
] | 8a871093966f001b4dadf7d097ac326169f3c066 | https://github.com/kstateome/django-cas/blob/8a871093966f001b4dadf7d097ac326169f3c066/cas/backends.py#L75-L138 | train |
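The settings this verification path relies on, with illustrative values (the names are the ones referenced in the code above; the URLs are assumptions):

# settings.py
CAS_SERVER_URL = 'https://cas.example.edu/cas/'            # base CAS URL
CAS_PROXY_CALLBACK = 'https://app.example.edu/cas/proxycallback/'
CAS_RESPONSE_CALLBACKS = []  # optional hooks invoked with the parsed tree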
kstateome/django-cas | cas/backends.py | verify_proxy_ticket | def verify_proxy_ticket(ticket, service):
"""
Verifies CAS 2.0+ XML-based proxy ticket.
:param: ticket
:param: service
Returns username on success and None on failure.
"""
params = {'ticket': ticket, 'service': service}
url = (urljoin(settings.CAS_SERVER_URL, 'proxyValidate') + '?' +
urlencode(params))
page = urlopen(url)
try:
response = page.read()
tree = ElementTree.fromstring(response)
if tree[0].tag.endswith('authenticationSuccess'):
username = tree[0][0].text
proxies = []
if len(tree[0]) > 1:
for element in tree[0][1]:
proxies.append(element.text)
return {"username": username, "proxies": proxies}
else:
return None
finally:
page.close() | python | def verify_proxy_ticket(ticket, service):
"""
Verifies CAS 2.0+ XML-based proxy ticket.
:param: ticket
:param: service
Returns username on success and None on failure.
"""
params = {'ticket': ticket, 'service': service}
url = (urljoin(settings.CAS_SERVER_URL, 'proxyValidate') + '?' +
urlencode(params))
page = urlopen(url)
try:
response = page.read()
tree = ElementTree.fromstring(response)
if tree[0].tag.endswith('authenticationSuccess'):
username = tree[0][0].text
proxies = []
if len(tree[0]) > 1:
for element in tree[0][1]:
proxies.append(element.text)
return {"username": username, "proxies": proxies}
else:
return None
finally:
page.close() | [
"def",
"verify_proxy_ticket",
"(",
"ticket",
",",
"service",
")",
":",
"params",
"=",
"{",
"'ticket'",
":",
"ticket",
",",
"'service'",
":",
"service",
"}",
"url",
"=",
"(",
"urljoin",
"(",
"settings",
".",
"CAS_SERVER_URL",
",",
"'proxyValidate'",
")",
"+",
"'?'",
"+",
"urlencode",
"(",
"params",
")",
")",
"page",
"=",
"urlopen",
"(",
"url",
")",
"try",
":",
"response",
"=",
"page",
".",
"read",
"(",
")",
"tree",
"=",
"ElementTree",
".",
"fromstring",
"(",
"response",
")",
"if",
"tree",
"[",
"0",
"]",
".",
"tag",
".",
"endswith",
"(",
"'authenticationSuccess'",
")",
":",
"username",
"=",
"tree",
"[",
"0",
"]",
"[",
"0",
"]",
".",
"text",
"proxies",
"=",
"[",
"]",
"if",
"len",
"(",
"tree",
"[",
"0",
"]",
")",
">",
"1",
":",
"for",
"element",
"in",
"tree",
"[",
"0",
"]",
"[",
"1",
"]",
":",
"proxies",
".",
"append",
"(",
"element",
".",
"text",
")",
"return",
"{",
"\"username\"",
":",
"username",
",",
"\"proxies\"",
":",
"proxies",
"}",
"else",
":",
"return",
"None",
"finally",
":",
"page",
".",
"close",
"(",
")"
] | Verifies CAS 2.0+ XML-based proxy ticket.
:param: ticket
:param: service
Returns username on success and None on failure. | [
"Verifies",
"CAS",
"2",
".",
"0",
"+",
"XML",
"-",
"based",
"proxy",
"ticket",
"."
] | 8a871093966f001b4dadf7d097ac326169f3c066 | https://github.com/kstateome/django-cas/blob/8a871093966f001b4dadf7d097ac326169f3c066/cas/backends.py#L141-L171 | train |
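A proxied service can validate an incoming proxy ticket like this; the service URL is an assumption:

result = verify_proxy_ticket(request.GET['ticket'],
                             'https://backend.example.edu/api/')
if result is not None:
    username = result['username']   # authenticated end user
    proxies = result['proxies']     # chain of proxying services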
kstateome/django-cas | cas/backends.py | _get_pgtiou | def _get_pgtiou(pgt):
"""
Returns a PgtIOU object given a pgt.
The PgtIOU (tgt) is set by the CAS server in a different request
that has completed before this call, however, it may not be found in
the database by this calling thread, hence the attempt to get the
    ticket is retried for up to 5 seconds. This should be handled in some
better way.
Users can opt out of this waiting period by setting CAS_PGT_FETCH_WAIT = False
:param: pgt
"""
pgtIou = None
retries_left = 5
if not settings.CAS_PGT_FETCH_WAIT:
retries_left = 1
while not pgtIou and retries_left:
try:
return PgtIOU.objects.get(tgt=pgt)
except PgtIOU.DoesNotExist:
if settings.CAS_PGT_FETCH_WAIT:
time.sleep(1)
retries_left -= 1
logger.info('Did not fetch ticket, trying again. {tries} tries left.'.format(
tries=retries_left
))
raise CasTicketException("Could not find pgtIou for pgt %s" % pgt) | python | def _get_pgtiou(pgt):
"""
Returns a PgtIOU object given a pgt.
The PgtIOU (tgt) is set by the CAS server in a different request
that has completed before this call, however, it may not be found in
the database by this calling thread, hence the attempt to get the
    ticket is retried for up to 5 seconds. This should be handled in some
better way.
Users can opt out of this waiting period by setting CAS_PGT_FETCH_WAIT = False
:param: pgt
"""
pgtIou = None
retries_left = 5
if not settings.CAS_PGT_FETCH_WAIT:
retries_left = 1
while not pgtIou and retries_left:
try:
return PgtIOU.objects.get(tgt=pgt)
except PgtIOU.DoesNotExist:
if settings.CAS_PGT_FETCH_WAIT:
time.sleep(1)
retries_left -= 1
logger.info('Did not fetch ticket, trying again. {tries} tries left.'.format(
tries=retries_left
))
raise CasTicketException("Could not find pgtIou for pgt %s" % pgt) | [
"def",
"_get_pgtiou",
"(",
"pgt",
")",
":",
"pgtIou",
"=",
"None",
"retries_left",
"=",
"5",
"if",
"not",
"settings",
".",
"CAS_PGT_FETCH_WAIT",
":",
"retries_left",
"=",
"1",
"while",
"not",
"pgtIou",
"and",
"retries_left",
":",
"try",
":",
"return",
"PgtIOU",
".",
"objects",
".",
"get",
"(",
"tgt",
"=",
"pgt",
")",
"except",
"PgtIOU",
".",
"DoesNotExist",
":",
"if",
"settings",
".",
"CAS_PGT_FETCH_WAIT",
":",
"time",
".",
"sleep",
"(",
"1",
")",
"retries_left",
"-=",
"1",
"logger",
".",
"info",
"(",
"'Did not fetch ticket, trying again. {tries} tries left.'",
".",
"format",
"(",
"tries",
"=",
"retries_left",
")",
")",
"raise",
"CasTicketException",
"(",
"\"Could not find pgtIou for pgt %s\"",
"%",
"pgt",
")"
] | Returns a PgtIOU object given a pgt.
The PgtIOU (tgt) is set by the CAS server in a different request
that has completed before this call, however, it may not be found in
the database by this calling thread, hence the attempt to get the
ticket is retried for up to 5 seconds. This should be handled in some
better way.
Users can opt out of this waiting period by setting CAS_PGT_FETCH_WAIT = False
:param: pgt | [
"Returns",
"a",
"PgtIOU",
"object",
"given",
"a",
"pgt",
"."
] | 8a871093966f001b4dadf7d097ac326169f3c066 | https://github.com/kstateome/django-cas/blob/8a871093966f001b4dadf7d097ac326169f3c066/cas/backends.py#L181-L213 | train |
kstateome/django-cas | cas/decorators.py | gateway | def gateway():
"""
    Authenticates a single sign-on session if a ticket is available,
    but doesn't redirect to the sign-in URL otherwise.
"""
if settings.CAS_GATEWAY == False:
raise ImproperlyConfigured('CAS_GATEWAY must be set to True')
def wrap(func):
def wrapped_f(*args):
from cas.views import login
request = args[0]
try:
# use callable for pre-django 2.0
is_authenticated = request.user.is_authenticated()
except TypeError:
is_authenticated = request.user.is_authenticated
if is_authenticated:
# Is Authed, fine
pass
else:
path_with_params = request.path + '?' + urlencode(request.GET.copy())
if request.GET.get('ticket'):
# Not Authed, but have a ticket!
# Try to authenticate
response = login(request, path_with_params, False, True)
if isinstance(response, HttpResponseRedirect):
# For certain instances where a forbidden occurs, we need to pass instead of return a response.
return response
else:
#Not Authed, but no ticket
gatewayed = request.GET.get('gatewayed')
if gatewayed == 'true':
pass
else:
# Not Authed, try to authenticate
response = login(request, path_with_params, False, True)
if isinstance(response, HttpResponseRedirect):
return response
return func(*args)
return wrapped_f
return wrap | python | def gateway():
"""
    Authenticates a single sign-on session if a ticket is available,
    but doesn't redirect to the sign-in URL otherwise.
"""
if settings.CAS_GATEWAY == False:
raise ImproperlyConfigured('CAS_GATEWAY must be set to True')
def wrap(func):
def wrapped_f(*args):
from cas.views import login
request = args[0]
try:
# use callable for pre-django 2.0
is_authenticated = request.user.is_authenticated()
except TypeError:
is_authenticated = request.user.is_authenticated
if is_authenticated:
# Is Authed, fine
pass
else:
path_with_params = request.path + '?' + urlencode(request.GET.copy())
if request.GET.get('ticket'):
# Not Authed, but have a ticket!
# Try to authenticate
response = login(request, path_with_params, False, True)
if isinstance(response, HttpResponseRedirect):
# For certain instances where a forbidden occurs, we need to pass instead of return a response.
return response
else:
#Not Authed, but no ticket
gatewayed = request.GET.get('gatewayed')
if gatewayed == 'true':
pass
else:
# Not Authed, try to authenticate
response = login(request, path_with_params, False, True)
if isinstance(response, HttpResponseRedirect):
return response
return func(*args)
return wrapped_f
return wrap | [
"def",
"gateway",
"(",
")",
":",
"if",
"settings",
".",
"CAS_GATEWAY",
"==",
"False",
":",
"raise",
"ImproperlyConfigured",
"(",
"'CAS_GATEWAY must be set to True'",
")",
"def",
"wrap",
"(",
"func",
")",
":",
"def",
"wrapped_f",
"(",
"*",
"args",
")",
":",
"from",
"cas",
".",
"views",
"import",
"login",
"request",
"=",
"args",
"[",
"0",
"]",
"try",
":",
"# use callable for pre-django 2.0",
"is_authenticated",
"=",
"request",
".",
"user",
".",
"is_authenticated",
"(",
")",
"except",
"TypeError",
":",
"is_authenticated",
"=",
"request",
".",
"user",
".",
"is_authenticated",
"if",
"is_authenticated",
":",
"# Is Authed, fine",
"pass",
"else",
":",
"path_with_params",
"=",
"request",
".",
"path",
"+",
"'?'",
"+",
"urlencode",
"(",
"request",
".",
"GET",
".",
"copy",
"(",
")",
")",
"if",
"request",
".",
"GET",
".",
"get",
"(",
"'ticket'",
")",
":",
"# Not Authed, but have a ticket!",
"# Try to authenticate",
"response",
"=",
"login",
"(",
"request",
",",
"path_with_params",
",",
"False",
",",
"True",
")",
"if",
"isinstance",
"(",
"response",
",",
"HttpResponseRedirect",
")",
":",
"# For certain instances where a forbidden occurs, we need to pass instead of return a response.",
"return",
"response",
"else",
":",
"#Not Authed, but no ticket",
"gatewayed",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'gatewayed'",
")",
"if",
"gatewayed",
"==",
"'true'",
":",
"pass",
"else",
":",
"# Not Authed, try to authenticate",
"response",
"=",
"login",
"(",
"request",
",",
"path_with_params",
",",
"False",
",",
"True",
")",
"if",
"isinstance",
"(",
"response",
",",
"HttpResponseRedirect",
")",
":",
"return",
"response",
"return",
"func",
"(",
"*",
"args",
")",
"return",
"wrapped_f",
"return",
"wrap"
] | Authenticates single sign on session if ticket is available,
but doesn't redirect to sign in url otherwise. | [
"Authenticates",
"single",
"sign",
"on",
"session",
"if",
"ticket",
"is",
"available",
"but",
"doesn",
"t",
"redirect",
"to",
"sign",
"in",
"url",
"otherwise",
"."
] | 8a871093966f001b4dadf7d097ac326169f3c066 | https://github.com/kstateome/django-cas/blob/8a871093966f001b4dadf7d097ac326169f3c066/cas/decorators.py#L60-L106 | train |
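A minimal usage sketch for the decorator above, assuming CAS_GATEWAY = True in settings (it raises ImproperlyConfigured otherwise); the view itself is hypothetical:

    from django.http import HttpResponse
    from cas.decorators import gateway

    @gateway()
    def landing(request):
        # Runs whether or not the visitor signed in; the decorator already
        # attempted a gatewayed CAS login whenever a ticket was present.
        try:
            authed = request.user.is_authenticated()  # pre-Django 2.0
        except TypeError:
            authed = request.user.is_authenticated    # Django 2.0+ property
        name = request.user.username if authed else 'guest'
        return HttpResponse('Hello, %s' % name)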
kstateome/django-cas | cas/views.py | _service_url | def _service_url(request, redirect_to=None, gateway=False):
"""
Generates application service URL for CAS
:param: request Request Object
    :param: redirect_to URL to redirect to
:param: gateway Should this be a gatewayed pass through
"""
if settings.CAS_FORCE_SSL_SERVICE_URL:
protocol = 'https://'
else:
protocol = ('http://', 'https://')[request.is_secure()]
host = request.get_host()
service = protocol + host + request.path
if redirect_to:
if '?' in service:
service += '&'
else:
service += '?'
if gateway:
""" If gateway, capture params and reencode them before returning a url """
gateway_params = [(REDIRECT_FIELD_NAME, redirect_to), ('gatewayed', 'true')]
query_dict = request.GET.copy()
try:
del query_dict['ticket']
except:
pass
query_list = query_dict.items()
# remove duplicate params
for item in query_list:
for index, item2 in enumerate(gateway_params):
if item[0] == item2[0]:
gateway_params.pop(index)
extra_params = gateway_params + query_list
#Sort params by key name so they are always in the same order.
sorted_params = sorted(extra_params, key=itemgetter(0))
service += urlencode(sorted_params)
else:
service += urlencode({REDIRECT_FIELD_NAME: redirect_to})
return service | python | def _service_url(request, redirect_to=None, gateway=False):
"""
Generates application service URL for CAS
:param: request Request Object
    :param: redirect_to URL to redirect to
:param: gateway Should this be a gatewayed pass through
"""
if settings.CAS_FORCE_SSL_SERVICE_URL:
protocol = 'https://'
else:
protocol = ('http://', 'https://')[request.is_secure()]
host = request.get_host()
service = protocol + host + request.path
if redirect_to:
if '?' in service:
service += '&'
else:
service += '?'
if gateway:
""" If gateway, capture params and reencode them before returning a url """
gateway_params = [(REDIRECT_FIELD_NAME, redirect_to), ('gatewayed', 'true')]
query_dict = request.GET.copy()
try:
del query_dict['ticket']
except:
pass
query_list = query_dict.items()
# remove duplicate params
for item in query_list:
for index, item2 in enumerate(gateway_params):
if item[0] == item2[0]:
gateway_params.pop(index)
extra_params = gateway_params + query_list
#Sort params by key name so they are always in the same order.
sorted_params = sorted(extra_params, key=itemgetter(0))
service += urlencode(sorted_params)
else:
service += urlencode({REDIRECT_FIELD_NAME: redirect_to})
return service | [
"def",
"_service_url",
"(",
"request",
",",
"redirect_to",
"=",
"None",
",",
"gateway",
"=",
"False",
")",
":",
"if",
"settings",
".",
"CAS_FORCE_SSL_SERVICE_URL",
":",
"protocol",
"=",
"'https://'",
"else",
":",
"protocol",
"=",
"(",
"'http://'",
",",
"'https://'",
")",
"[",
"request",
".",
"is_secure",
"(",
")",
"]",
"host",
"=",
"request",
".",
"get_host",
"(",
")",
"service",
"=",
"protocol",
"+",
"host",
"+",
"request",
".",
"path",
"if",
"redirect_to",
":",
"if",
"'?'",
"in",
"service",
":",
"service",
"+=",
"'&'",
"else",
":",
"service",
"+=",
"'?'",
"if",
"gateway",
":",
"\"\"\" If gateway, capture params and reencode them before returning a url \"\"\"",
"gateway_params",
"=",
"[",
"(",
"REDIRECT_FIELD_NAME",
",",
"redirect_to",
")",
",",
"(",
"'gatewayed'",
",",
"'true'",
")",
"]",
"query_dict",
"=",
"request",
".",
"GET",
".",
"copy",
"(",
")",
"try",
":",
"del",
"query_dict",
"[",
"'ticket'",
"]",
"except",
":",
"pass",
"query_list",
"=",
"query_dict",
".",
"items",
"(",
")",
"# remove duplicate params",
"for",
"item",
"in",
"query_list",
":",
"for",
"index",
",",
"item2",
"in",
"enumerate",
"(",
"gateway_params",
")",
":",
"if",
"item",
"[",
"0",
"]",
"==",
"item2",
"[",
"0",
"]",
":",
"gateway_params",
".",
"pop",
"(",
"index",
")",
"extra_params",
"=",
"gateway_params",
"+",
"query_list",
"#Sort params by key name so they are always in the same order.",
"sorted_params",
"=",
"sorted",
"(",
"extra_params",
",",
"key",
"=",
"itemgetter",
"(",
"0",
")",
")",
"service",
"+=",
"urlencode",
"(",
"sorted_params",
")",
"else",
":",
"service",
"+=",
"urlencode",
"(",
"{",
"REDIRECT_FIELD_NAME",
":",
"redirect_to",
"}",
")",
"return",
"service"
] | Generates application service URL for CAS
:param: request Request Object
    :param: redirect_to URL to redirect to
:param: gateway Should this be a gatewayed pass through | [
"Generates",
"application",
"service",
"URL",
"for",
"CAS"
] | 8a871093966f001b4dadf7d097ac326169f3c066 | https://github.com/kstateome/django-cas/blob/8a871093966f001b4dadf7d097ac326169f3c066/cas/views.py#L32-L79 | train |
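For intuition, a hedged trace of the non-gateway branch with invented host and path values; REDIRECT_FIELD_NAME is Django's default 'next':

    # Hypothetical: plain HTTP request to http://example.com/accounts/login/
    # _service_url(request, redirect_to='/home/')
    #   -> 'http://example.com/accounts/login/?next=%2Fhome%2F'
    # With settings.CAS_FORCE_SSL_SERVICE_URL = True the scheme is forced to
    # 'https://' regardless of request.is_secure().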
kstateome/django-cas | cas/views.py | proxy_callback | def proxy_callback(request):
"""Handles CAS 2.0+ XML-based proxy callback call.
Stores the proxy granting ticket in the database for
future use.
NB: Use created and set it in python in case database
has issues with setting up the default timestamp value
"""
pgtIou = request.GET.get('pgtIou')
tgt = request.GET.get('pgtId')
if not (pgtIou and tgt):
logger.info('No pgtIou or tgt found in request.GET')
        return HttpResponse('No pgtIou', content_type="text/plain")
try:
PgtIOU.objects.create(tgt=tgt, pgtIou=pgtIou, created=datetime.datetime.now())
request.session['pgt-TICKET'] = pgtIou
return HttpResponse('PGT ticket is: {ticket}'.format(ticket=pgtIou), content_type="text/plain")
except Exception as e:
logger.warning('PGT storage failed. {message}'.format(
message=e
))
return HttpResponse('PGT storage failed for {request}'.format(request=str(request.GET)),
content_type="text/plain") | python | def proxy_callback(request):
"""Handles CAS 2.0+ XML-based proxy callback call.
Stores the proxy granting ticket in the database for
future use.
NB: Use created and set it in python in case database
has issues with setting up the default timestamp value
"""
pgtIou = request.GET.get('pgtIou')
tgt = request.GET.get('pgtId')
if not (pgtIou and tgt):
logger.info('No pgtIou or tgt found in request.GET')
        return HttpResponse('No pgtIou', content_type="text/plain")
try:
PgtIOU.objects.create(tgt=tgt, pgtIou=pgtIou, created=datetime.datetime.now())
request.session['pgt-TICKET'] = pgtIou
return HttpResponse('PGT ticket is: {ticket}'.format(ticket=pgtIou), content_type="text/plain")
except Exception as e:
logger.warning('PGT storage failed. {message}'.format(
message=e
))
return HttpResponse('PGT storage failed for {request}'.format(request=str(request.GET)),
content_type="text/plain") | [
"def",
"proxy_callback",
"(",
"request",
")",
":",
"pgtIou",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'pgtIou'",
")",
"tgt",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'pgtId'",
")",
"if",
"not",
"(",
"pgtIou",
"and",
"tgt",
")",
":",
"logger",
".",
"info",
"(",
"'No pgtIou or tgt found in request.GET'",
")",
"return",
"HttpResponse",
"(",
"'No pgtIOO'",
",",
"content_type",
"=",
"\"text/plain\"",
")",
"try",
":",
"PgtIOU",
".",
"objects",
".",
"create",
"(",
"tgt",
"=",
"tgt",
",",
"pgtIou",
"=",
"pgtIou",
",",
"created",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
")",
"request",
".",
"session",
"[",
"'pgt-TICKET'",
"]",
"=",
"pgtIou",
"return",
"HttpResponse",
"(",
"'PGT ticket is: {ticket}'",
".",
"format",
"(",
"ticket",
"=",
"pgtIou",
")",
",",
"content_type",
"=",
"\"text/plain\"",
")",
"except",
"Exception",
"as",
"e",
":",
"logger",
".",
"warning",
"(",
"'PGT storage failed. {message}'",
".",
"format",
"(",
"message",
"=",
"e",
")",
")",
"return",
"HttpResponse",
"(",
"'PGT storage failed for {request}'",
".",
"format",
"(",
"request",
"=",
"str",
"(",
"request",
".",
"GET",
")",
")",
",",
"content_type",
"=",
"\"text/plain\"",
")"
] | Handles CAS 2.0+ XML-based proxy callback call.
Stores the proxy granting ticket in the database for
future use.
NB: Use created and set it in python in case database
has issues with setting up the default timestamp value | [
"Handles",
"CAS",
"2",
".",
"0",
"+",
"XML",
"-",
"based",
"proxy",
"callback",
"call",
".",
"Stores",
"the",
"proxy",
"granting",
"ticket",
"in",
"the",
"database",
"for",
"future",
"use",
"."
] | 8a871093966f001b4dadf7d097ac326169f3c066 | https://github.com/kstateome/django-cas/blob/8a871093966f001b4dadf7d097ac326169f3c066/cas/views.py#L245-L270 | train |
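A hedged wiring sketch for the callback view; the route path and name are assumptions, and django.conf.urls.url matches the older Django releases this app targets:

    # urls.py (sketch only)
    from django.conf.urls import url
    from cas.views import proxy_callback

    urlpatterns = [
        url(r'^cas/proxy_callback/$', proxy_callback, name='cas_proxy_callback'),
    ]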
eventbrite/eventbrite-sdk-python | eventbrite/decorators.py | objectify | def objectify(func):
""" Converts the returned value from a models.Payload to
a models.EventbriteObject. Used by the access methods
of the client.Eventbrite object
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
payload = func(*args, **kwargs)
except requests.exceptions.ConnectionError as e:
raise InternetConnectionError(e)
return EventbriteObject.create(payload)
return wrapper | python | def objectify(func):
""" Converts the returned value from a models.Payload to
a models.EventbriteObject. Used by the access methods
of the client.Eventbrite object
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
payload = func(*args, **kwargs)
except requests.exceptions.ConnectionError as e:
raise InternetConnectionError(e)
return EventbriteObject.create(payload)
return wrapper | [
"def",
"objectify",
"(",
"func",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"payload",
"=",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"requests",
".",
"exceptions",
".",
"ConnectionError",
"as",
"e",
":",
"raise",
"InternetConnectionError",
"(",
"e",
")",
"return",
"EventbriteObject",
".",
"create",
"(",
"payload",
")",
"return",
"wrapper"
] | Converts the returned value from a models.Payload to
a models.EventbriteObject. Used by the access methods
of the client.Eventbrite object | [
"Converts",
"the",
"returned",
"value",
"from",
"a",
"models",
".",
"Payload",
"to",
"a",
"models",
".",
"EventbriteObject",
".",
"Used",
"by",
"the",
"access",
"methods",
"of",
"the",
"client",
".",
"Eventbrite",
"object"
] | f2e5dc5aa1aa3e45766de13f16fd65722163d91a | https://github.com/eventbrite/eventbrite-sdk-python/blob/f2e5dc5aa1aa3e45766de13f16fd65722163d91a/eventbrite/decorators.py#L9-L22 | train |
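Because the docstring says the decorator backs the access methods of client.Eventbrite, the caller-visible effect can be sketched as below; the token is a placeholder and the exceptions import path is an assumption:

    from eventbrite import Eventbrite
    from eventbrite.exceptions import InternetConnectionError  # assumed path

    evb = Eventbrite('MY_OAUTH_TOKEN')  # placeholder token
    try:
        me = evb.get_user()  # decorated, so this is an EventbriteObject
    except InternetConnectionError:
        me = None  # objectify re-raised a requests ConnectionError as this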
eventbrite/eventbrite-sdk-python | eventbrite/client.py | Eventbrite.get_user | def get_user(self, user_id=None):
"""
    Returns a user for the specified user id.
GET users/:id/
:param int user_id: (optional) The id assigned to a user
"""
if user_id:
return self.get('/users/{0}/'.format(user_id))
return self.get('/users/me/') | python | def get_user(self, user_id=None):
"""
    Returns a user for the specified user id.
GET users/:id/
:param int user_id: (optional) The id assigned to a user
"""
if user_id:
return self.get('/users/{0}/'.format(user_id))
return self.get('/users/me/') | [
"def",
"get_user",
"(",
"self",
",",
"user_id",
"=",
"None",
")",
":",
"if",
"user_id",
":",
"return",
"self",
".",
"get",
"(",
"'/users/{0}/'",
".",
"format",
"(",
"user_id",
")",
")",
"return",
"self",
".",
"get",
"(",
"'/users/me/'",
")"
] | Returns a user for the specified user id.
GET users/:id/
:param int user_id: (optional) The id assigned to a user | [
"Returns",
"a",
"user",
"for",
"the",
"specified",
"user",
"as",
"user",
"."
] | f2e5dc5aa1aa3e45766de13f16fd65722163d91a | https://github.com/eventbrite/eventbrite-sdk-python/blob/f2e5dc5aa1aa3e45766de13f16fd65722163d91a/eventbrite/client.py#L94-L105 | train |
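Both branches of the id check above, as a hedged sketch (token and id are illustrative):

    from eventbrite import Eventbrite

    evb = Eventbrite('MY_OAUTH_TOKEN')  # placeholder token
    me = evb.get_user()            # no id   -> GET /users/me/
    someone = evb.get_user(12345)  # with id -> GET /users/12345/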
eventbrite/eventbrite-sdk-python | eventbrite/client.py | Eventbrite.get_event_attendees | def get_event_attendees(self, event_id, status=None, changed_since=None):
"""
Returns a paginated response with a key of attendees, containing a
list of attendee.
GET /events/:id/attendees/
"""
data = {}
if status: # TODO - check the types of valid status
data['status'] = status
if changed_since:
data['changed_since'] = changed_since
return self.get("/events/{0}/attendees/".format(event_id), data=data) | python | def get_event_attendees(self, event_id, status=None, changed_since=None):
"""
Returns a paginated response with a key of attendees, containing a
list of attendee.
GET /events/:id/attendees/
"""
data = {}
if status: # TODO - check the types of valid status
data['status'] = status
if changed_since:
data['changed_since'] = changed_since
return self.get("/events/{0}/attendees/".format(event_id), data=data) | [
"def",
"get_event_attendees",
"(",
"self",
",",
"event_id",
",",
"status",
"=",
"None",
",",
"changed_since",
"=",
"None",
")",
":",
"data",
"=",
"{",
"}",
"if",
"status",
":",
"# TODO - check the types of valid status",
"data",
"[",
"'status'",
"]",
"=",
"status",
"if",
"changed_since",
":",
"data",
"[",
"'changed_since'",
"]",
"=",
"changed_since",
"return",
"self",
".",
"get",
"(",
"\"/events/{0}/attendees/\"",
".",
"format",
"(",
"event_id",
")",
",",
"data",
"=",
"data",
")"
] | Returns a paginated response with a key of attendees, containing a
list of attendee.
GET /events/:id/attendees/ | [
"Returns",
"a",
"paginated",
"response",
"with",
"a",
"key",
"of",
"attendees",
"containing",
"a",
"list",
"of",
"attendee",
"."
] | f2e5dc5aa1aa3e45766de13f16fd65722163d91a | https://github.com/eventbrite/eventbrite-sdk-python/blob/f2e5dc5aa1aa3e45766de13f16fd65722163d91a/eventbrite/client.py#L133-L145 | train |
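A hedged call sketch; the event id is illustrative and, as the TODO in the code notes, valid status values are unverified, so 'attending' is only an assumption:

    from eventbrite import Eventbrite

    evb = Eventbrite('MY_OAUTH_TOKEN')  # placeholder token
    page = evb.get_event_attendees(
        '12345678',                            # event id (illustrative)
        status='attending',                    # unverified status value
        changed_since='2015-01-01T00:00:00Z',  # assumed ISO-8601 format
    )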
eventbrite/eventbrite-sdk-python | eventbrite/client.py | Eventbrite.webhook_to_object | def webhook_to_object(self, webhook):
"""
Converts JSON sent by an Eventbrite Webhook to the appropriate
Eventbrite object.
# TODO - Add capability to handle Django request objects
"""
if isinstance(webhook, string_type):
# If still JSON, convert to a Python dict
        webhook = json.loads(webhook)
# if a flask.Request object, try to convert that to a webhook
if not isinstance(webhook, dict):
webhook = get_webhook_from_request(webhook)
try:
webhook['api_url']
except KeyError:
raise InvalidWebhook
payload = self.get(webhook['api_url'])
return payload | python | def webhook_to_object(self, webhook):
"""
Converts JSON sent by an Eventbrite Webhook to the appropriate
Eventbrite object.
# TODO - Add capability to handle Django request objects
"""
if isinstance(webhook, string_type):
# If still JSON, convert to a Python dict
        webhook = json.loads(webhook)
# if a flask.Request object, try to convert that to a webhook
if not isinstance(webhook, dict):
webhook = get_webhook_from_request(webhook)
try:
webhook['api_url']
except KeyError:
raise InvalidWebhook
payload = self.get(webhook['api_url'])
return payload | [
"def",
"webhook_to_object",
"(",
"self",
",",
"webhook",
")",
":",
"if",
"isinstance",
"(",
"webhook",
",",
"string_type",
")",
":",
"# If still JSON, convert to a Python dict",
"webhook",
"=",
"json",
".",
"dumps",
"(",
"webhook",
")",
"# if a flask.Request object, try to convert that to a webhook",
"if",
"not",
"isinstance",
"(",
"webhook",
",",
"dict",
")",
":",
"webhook",
"=",
"get_webhook_from_request",
"(",
"webhook",
")",
"try",
":",
"webhook",
"[",
"'api_url'",
"]",
"except",
"KeyError",
":",
"raise",
"InvalidWebhook",
"payload",
"=",
"self",
".",
"get",
"(",
"webhook",
"[",
"'api_url'",
"]",
")",
"return",
"payload"
] | Converts JSON sent by an Eventbrite Webhook to the appropriate
Eventbrite object.
# TODO - Add capability to handle Django request objects | [
"Converts",
"JSON",
"sent",
"by",
"an",
"Eventbrite",
"Webhook",
"to",
"the",
"appropriate",
"Eventbrite",
"object",
"."
] | f2e5dc5aa1aa3e45766de13f16fd65722163d91a | https://github.com/eventbrite/eventbrite-sdk-python/blob/f2e5dc5aa1aa3e45766de13f16fd65722163d91a/eventbrite/client.py#L227-L249 | train |
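A hedged sketch of a Flask endpoint feeding this method; per the code above it accepts a flask.Request, a dict, or a JSON string, and raises InvalidWebhook when 'api_url' is missing:

    from flask import Flask, request
    from eventbrite import Eventbrite

    app = Flask(__name__)
    evb = Eventbrite('MY_OAUTH_TOKEN')  # placeholder token

    @app.route('/webhook', methods=['POST'])
    def handle_webhook():
        # The client re-fetches the full object at the payload's 'api_url'.
        obj = evb.webhook_to_object(request)
        return 'ok'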
eventbrite/eventbrite-sdk-python | utils/generate_access_methods.py | get_params_from_page | def get_params_from_page(path, file_name, method_count):
""" This function accesses the rendered content.
    We must do this because the params are not defined in the docs,
    but rather in the rendered HTML
"""
# open the rendered file.
file_name = file_name.replace(".rst", "")
file_path = "{0}/../_build/html/endpoints/{1}/index.html".format(
path, file_name)
soup = bs4.BeautifulSoup(open(file_path))
# Pull out the relevant section
section = soup.find_all('div', class_='section')[method_count]
# get the tbody of the params table
tbody = section.find('tbody')
params = []
if tbody is not None:
for row in tbody.find_all('tr'):
name, param_type, required, description = row.find_all('td')
required = required.text == 'Yes'
param = dict(
name=name.text,
type=param_type.text,
required=required,
description=description.text
)
params.append(param)
params = sorted(params, key=lambda k: not k['required'])
return params | python | def get_params_from_page(path, file_name, method_count):
""" This function accesses the rendered content.
    We must do this because the params are not defined in the docs,
    but rather in the rendered HTML
"""
# open the rendered file.
file_name = file_name.replace(".rst", "")
file_path = "{0}/../_build/html/endpoints/{1}/index.html".format(
path, file_name)
soup = bs4.BeautifulSoup(open(file_path))
# Pull out the relevant section
section = soup.find_all('div', class_='section')[method_count]
# get the tbody of the params table
tbody = section.find('tbody')
params = []
if tbody is not None:
for row in tbody.find_all('tr'):
name, param_type, required, description = row.find_all('td')
required = required.text == 'Yes'
param = dict(
name=name.text,
type=param_type.text,
required=required,
description=description.text
)
params.append(param)
params = sorted(params, key=lambda k: not k['required'])
return params | [
"def",
"get_params_from_page",
"(",
"path",
",",
"file_name",
",",
"method_count",
")",
":",
"# open the rendered file.",
"file_name",
"=",
"file_name",
".",
"replace",
"(",
"\".rst\"",
",",
"\"\"",
")",
"file_path",
"=",
"\"{0}/../_build/html/endpoints/{1}/index.html\"",
".",
"format",
"(",
"path",
",",
"file_name",
")",
"soup",
"=",
"bs4",
".",
"BeautifulSoup",
"(",
"open",
"(",
"file_path",
")",
")",
"# Pull out the relevant section",
"section",
"=",
"soup",
".",
"find_all",
"(",
"'div'",
",",
"class_",
"=",
"'section'",
")",
"[",
"method_count",
"]",
"# get the tbody of the params table",
"tbody",
"=",
"section",
".",
"find",
"(",
"'tbody'",
")",
"params",
"=",
"[",
"]",
"if",
"tbody",
"is",
"not",
"None",
":",
"for",
"row",
"in",
"tbody",
".",
"find_all",
"(",
"'tr'",
")",
":",
"name",
",",
"param_type",
",",
"required",
",",
"description",
"=",
"row",
".",
"find_all",
"(",
"'td'",
")",
"required",
"=",
"required",
".",
"text",
"==",
"'Yes'",
"param",
"=",
"dict",
"(",
"name",
"=",
"name",
".",
"text",
",",
"type",
"=",
"param_type",
".",
"text",
",",
"required",
"=",
"required",
",",
"description",
"=",
"description",
".",
"text",
")",
"params",
".",
"append",
"(",
"param",
")",
"params",
"=",
"sorted",
"(",
"params",
",",
"key",
"=",
"lambda",
"k",
":",
"not",
"k",
"[",
"'required'",
"]",
")",
"return",
"params"
] | This function accesses the rendered content.
    We must do this because the params are not defined in the docs,
    but rather in the rendered HTML | [
"This",
"function",
"accesses",
"the",
"rendered",
"content",
".",
"We",
"must",
"do",
"this",
"because",
"how",
"the",
"params",
"are",
"not",
"defined",
"in",
"the",
"docs",
"but",
"rather",
"the",
"rendered",
"HTML"
] | f2e5dc5aa1aa3e45766de13f16fd65722163d91a | https://github.com/eventbrite/eventbrite-sdk-python/blob/f2e5dc5aa1aa3e45766de13f16fd65722163d91a/utils/generate_access_methods.py#L172-L201 | train |
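The closing sorted() call orders required parameters ahead of optional ones; a self-contained demonstration of that key function:

    params = [
        {'name': 'status', 'required': False},
        {'name': 'id', 'required': True},
    ]
    params = sorted(params, key=lambda k: not k['required'])
    # -> 'id' first, because not True == False sorts before not False == True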
robromano/django-adminrestrict | adminrestrict/middleware.py | AdminPagesRestrictMiddleware.process_request | def process_request(self, request):
"""
        Check if the request is made from an allowed IP
"""
# Section adjusted to restrict login to ?edit
        # (using cms-toolbar-login) into DjangoCMS login.
        restricted_request_uri = (
            request.path.startswith(reverse('admin:index')) or
            "cms-toolbar-login" in request.build_absolute_uri()
        )
if restricted_request_uri and request.method == 'POST':
            # AllowedIP table empty means access is always granted
if AllowedIP.objects.count() > 0:
# If there are wildcard IPs access is always granted
if AllowedIP.objects.filter(ip_address="*").count() == 0:
request_ip = get_ip_address_from_request(request)
# If the request_ip is in the AllowedIP the access
# is granted
if AllowedIP.objects.filter(ip_address=request_ip).count() == 0:
# We check regular expressions defining ranges
# of IPs. If any range contains the request_ip
# the access is granted
for regex_ip_range in AllowedIP.objects.filter(ip_address__endswith="*"):
if re.match(regex_ip_range.ip_address.replace("*", ".*"), request_ip):
return None
return HttpResponseForbidden("Access to admin is denied.") | python | def process_request(self, request):
"""
        Check if the request is made from an allowed IP
"""
# Section adjusted to restrict login to ?edit
        # (using cms-toolbar-login) into DjangoCMS login.
        restricted_request_uri = (
            request.path.startswith(reverse('admin:index')) or
            "cms-toolbar-login" in request.build_absolute_uri()
        )
if restricted_request_uri and request.method == 'POST':
            # AllowedIP table empty means access is always granted
if AllowedIP.objects.count() > 0:
# If there are wildcard IPs access is always granted
if AllowedIP.objects.filter(ip_address="*").count() == 0:
request_ip = get_ip_address_from_request(request)
# If the request_ip is in the AllowedIP the access
# is granted
if AllowedIP.objects.filter(ip_address=request_ip).count() == 0:
# We check regular expressions defining ranges
# of IPs. If any range contains the request_ip
# the access is granted
for regex_ip_range in AllowedIP.objects.filter(ip_address__endswith="*"):
if re.match(regex_ip_range.ip_address.replace("*", ".*"), request_ip):
return None
return HttpResponseForbidden("Access to admin is denied.") | [
"def",
"process_request",
"(",
"self",
",",
"request",
")",
":",
"# Section adjusted to restrict login to ?edit",
"# (sing cms-toolbar-login)into DjangoCMS login.",
"restricted_request_uri",
"=",
"request",
".",
"path",
".",
"startswith",
"(",
"reverse",
"(",
"'admin:index'",
")",
"or",
"\"cms-toolbar-login\"",
"in",
"request",
".",
"build_absolute_uri",
"(",
")",
")",
"if",
"restricted_request_uri",
"and",
"request",
".",
"method",
"==",
"'POST'",
":",
"# AllowedIP table emty means access is always granted",
"if",
"AllowedIP",
".",
"objects",
".",
"count",
"(",
")",
">",
"0",
":",
"# If there are wildcard IPs access is always granted",
"if",
"AllowedIP",
".",
"objects",
".",
"filter",
"(",
"ip_address",
"=",
"\"*\"",
")",
".",
"count",
"(",
")",
"==",
"0",
":",
"request_ip",
"=",
"get_ip_address_from_request",
"(",
"request",
")",
"# If the request_ip is in the AllowedIP the access",
"# is granted",
"if",
"AllowedIP",
".",
"objects",
".",
"filter",
"(",
"ip_address",
"=",
"request_ip",
")",
".",
"count",
"(",
")",
"==",
"0",
":",
"# We check regular expressions defining ranges",
"# of IPs. If any range contains the request_ip",
"# the access is granted",
"for",
"regex_ip_range",
"in",
"AllowedIP",
".",
"objects",
".",
"filter",
"(",
"ip_address__endswith",
"=",
"\"*\"",
")",
":",
"if",
"re",
".",
"match",
"(",
"regex_ip_range",
".",
"ip_address",
".",
"replace",
"(",
"\"*\"",
",",
"\".*\"",
")",
",",
"request_ip",
")",
":",
"return",
"None",
"return",
"HttpResponseForbidden",
"(",
"\"Access to admin is denied.\"",
")"
] | Check if the request is made from an allowed IP | [
"Check",
"if",
"the",
"request",
"is",
"made",
"form",
"an",
"allowed",
"IP"
] | f05fd21e49677731e3d291da956b84bcac9a5c69 | https://github.com/robromano/django-adminrestrict/blob/f05fd21e49677731e3d291da956b84bcac9a5c69/adminrestrict/middleware.py#L87-L116 | train |
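The three allow-list shapes the middleware checks, as a hedged Django-shell sketch; the ip_address field name comes from the code, while the models import path is an assumption:

    from adminrestrict.models import AllowedIP  # assumed module path

    AllowedIP.objects.create(ip_address='127.0.0.1')    # exact-match allow
    AllowedIP.objects.create(ip_address='192.168.1.*')  # trailing-wildcard range
    AllowedIP.objects.create(ip_address='*')            # allow every address
    # An empty AllowedIP table also allows everyone, per the first check above.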
pydanny-archive/django-wysiwyg | django_wysiwyg/templatetags/wysiwyg.py | get_settings | def get_settings(editor_override=None):
"""Utility function to retrieve settings.py values with defaults"""
flavor = getattr(settings, "DJANGO_WYSIWYG_FLAVOR", "yui")
if editor_override is not None:
flavor = editor_override
return {
"DJANGO_WYSIWYG_MEDIA_URL": getattr(settings, "DJANGO_WYSIWYG_MEDIA_URL", urljoin(settings.STATIC_URL, flavor) + '/'),
"DJANGO_WYSIWYG_FLAVOR": flavor,
} | python | def get_settings(editor_override=None):
"""Utility function to retrieve settings.py values with defaults"""
flavor = getattr(settings, "DJANGO_WYSIWYG_FLAVOR", "yui")
if editor_override is not None:
flavor = editor_override
return {
"DJANGO_WYSIWYG_MEDIA_URL": getattr(settings, "DJANGO_WYSIWYG_MEDIA_URL", urljoin(settings.STATIC_URL, flavor) + '/'),
"DJANGO_WYSIWYG_FLAVOR": flavor,
} | [
"def",
"get_settings",
"(",
"editor_override",
"=",
"None",
")",
":",
"flavor",
"=",
"getattr",
"(",
"settings",
",",
"\"DJANGO_WYSIWYG_FLAVOR\"",
",",
"\"yui\"",
")",
"if",
"editor_override",
"is",
"not",
"None",
":",
"flavor",
"=",
"editor_override",
"return",
"{",
"\"DJANGO_WYSIWYG_MEDIA_URL\"",
":",
"getattr",
"(",
"settings",
",",
"\"DJANGO_WYSIWYG_MEDIA_URL\"",
",",
"urljoin",
"(",
"settings",
".",
"STATIC_URL",
",",
"flavor",
")",
"+",
"'/'",
")",
",",
"\"DJANGO_WYSIWYG_FLAVOR\"",
":",
"flavor",
",",
"}"
] | Utility function to retrieve settings.py values with defaults | [
"Utility",
"function",
"to",
"retrieve",
"settings",
".",
"py",
"values",
"with",
"defaults"
] | f05866356d417309624ec4863acdebd2084b1bc2 | https://github.com/pydanny-archive/django-wysiwyg/blob/f05866356d417309624ec4863acdebd2084b1bc2/django_wysiwyg/templatetags/wysiwyg.py#L13-L23 | train |
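A hedged settings sketch covering both values the helper reads; 'ckeditor' is an illustrative flavor name:

    # settings.py (sketch only)
    DJANGO_WYSIWYG_FLAVOR = 'ckeditor'  # falls back to 'yui' when unset
    # Optional override; otherwise computed as urljoin(STATIC_URL, flavor) + '/'
    DJANGO_WYSIWYG_MEDIA_URL = '/static/ckeditor/'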