id
int32 0
252k
| repo
stringlengths 7
55
| path
stringlengths 4
127
| func_name
stringlengths 1
88
| original_string
stringlengths 75
19.8k
| language
stringclasses 1
value | code
stringlengths 75
19.8k
| code_tokens
sequence | docstring
stringlengths 3
17.3k
| docstring_tokens
sequence | sha
stringlengths 40
40
| url
stringlengths 87
242
|
---|---|---|---|---|---|---|---|---|---|---|---|
249,400 | bmweiner/skillful | skillful/interface.py | ResponseBody.set_reprompt_ssml | def set_reprompt_ssml(self, ssml):
"""Set response reprompt output speech as SSML type.
Args:
ssml: str. Response speech used when type is 'SSML', should be formatted
with Speech Synthesis Markup Language. Cannot exceed 8,000
characters.
"""
self.response.reprompt.outputSpeech.type = 'SSML'
self.response.reprompt.outputSpeech.ssml = ssml | python | def set_reprompt_ssml(self, ssml):
"""Set response reprompt output speech as SSML type.
Args:
ssml: str. Response speech used when type is 'SSML', should be formatted
with Speech Synthesis Markup Language. Cannot exceed 8,000
characters.
"""
self.response.reprompt.outputSpeech.type = 'SSML'
self.response.reprompt.outputSpeech.ssml = ssml | [
"def",
"set_reprompt_ssml",
"(",
"self",
",",
"ssml",
")",
":",
"self",
".",
"response",
".",
"reprompt",
".",
"outputSpeech",
".",
"type",
"=",
"'SSML'",
"self",
".",
"response",
".",
"reprompt",
".",
"outputSpeech",
".",
"ssml",
"=",
"ssml"
] | Set response reprompt output speech as SSML type.
Args:
ssml: str. Response speech used when type is 'SSML', should be formatted
with Speech Synthesis Markup Language. Cannot exceed 8,000
characters. | [
"Set",
"response",
"reprompt",
"output",
"speech",
"as",
"SSML",
"type",
"."
] | 8646f54faf62cb63f165f7699b8ace5b4a08233c | https://github.com/bmweiner/skillful/blob/8646f54faf62cb63f165f7699b8ace5b4a08233c/skillful/interface.py#L435-L444 |
249,401 | salimm/httpoutpustream | httpoutputstream/stream.py | HttpBufferedOutstream.flush | def flush(self):
'''
Flushes the buffer to socket. Only call when the write is done. Calling flush after each write will prevent the buffer to act as efficiently as possible
'''
# return if empty
if self.__bufferidx == 0:
return
# send here the data
self.conn.send("%s\r\n" % hex(self.__bufferidx)[2:])
self.conn.send("%s\r\n" % ''.join(self.__buffer[0:self.__bufferidx]))
# reset buffer index = 0 (beginning of the buffer)
self.__bufferidx = 0 | python | def flush(self):
'''
Flushes the buffer to socket. Only call when the write is done. Calling flush after each write will prevent the buffer to act as efficiently as possible
'''
# return if empty
if self.__bufferidx == 0:
return
# send here the data
self.conn.send("%s\r\n" % hex(self.__bufferidx)[2:])
self.conn.send("%s\r\n" % ''.join(self.__buffer[0:self.__bufferidx]))
# reset buffer index = 0 (beginning of the buffer)
self.__bufferidx = 0 | [
"def",
"flush",
"(",
"self",
")",
":",
"# return if empty",
"if",
"self",
".",
"__bufferidx",
"==",
"0",
":",
"return",
"# send here the data",
"self",
".",
"conn",
".",
"send",
"(",
"\"%s\\r\\n\"",
"%",
"hex",
"(",
"self",
".",
"__bufferidx",
")",
"[",
"2",
":",
"]",
")",
"self",
".",
"conn",
".",
"send",
"(",
"\"%s\\r\\n\"",
"%",
"''",
".",
"join",
"(",
"self",
".",
"__buffer",
"[",
"0",
":",
"self",
".",
"__bufferidx",
"]",
")",
")",
"# reset buffer index = 0 (beginning of the buffer)",
"self",
".",
"__bufferidx",
"=",
"0"
] | Flushes the buffer to socket. Only call when the write is done. Calling flush after each write will prevent the buffer to act as efficiently as possible | [
"Flushes",
"the",
"buffer",
"to",
"socket",
".",
"Only",
"call",
"when",
"the",
"write",
"is",
"done",
".",
"Calling",
"flush",
"after",
"each",
"write",
"will",
"prevent",
"the",
"buffer",
"to",
"act",
"as",
"efficiently",
"as",
"possible"
] | c3c785e3c6faf6348494b742669cd8025659f763 | https://github.com/salimm/httpoutpustream/blob/c3c785e3c6faf6348494b742669cd8025659f763/httpoutputstream/stream.py#L92-L103 |
249,402 | salimm/httpoutpustream | httpoutputstream/stream.py | HttpBufferedOutstream.close | def close(self):
'''
Closes the stream to output. It destroys the buffer and the buffer pointer. However, it will not close the the client connection
'''
#write all that is remained in buffer
self.flush()
# delete buffer
self.__buffer = None
#reset buffer index to -1 to indicate no where
self.__bufferidx = -1
#writing the final empty chunk to the socket
# send here the data
self.conn.send("0\r\n")
self.conn.send("\r\n" )
#set closed flag
self.__closed = True | python | def close(self):
'''
Closes the stream to output. It destroys the buffer and the buffer pointer. However, it will not close the the client connection
'''
#write all that is remained in buffer
self.flush()
# delete buffer
self.__buffer = None
#reset buffer index to -1 to indicate no where
self.__bufferidx = -1
#writing the final empty chunk to the socket
# send here the data
self.conn.send("0\r\n")
self.conn.send("\r\n" )
#set closed flag
self.__closed = True | [
"def",
"close",
"(",
"self",
")",
":",
"#write all that is remained in buffer",
"self",
".",
"flush",
"(",
")",
"# delete buffer",
"self",
".",
"__buffer",
"=",
"None",
"#reset buffer index to -1 to indicate no where",
"self",
".",
"__bufferidx",
"=",
"-",
"1",
"#writing the final empty chunk to the socket",
"# send here the data",
"self",
".",
"conn",
".",
"send",
"(",
"\"0\\r\\n\"",
")",
"self",
".",
"conn",
".",
"send",
"(",
"\"\\r\\n\"",
")",
"#set closed flag",
"self",
".",
"__closed",
"=",
"True"
] | Closes the stream to output. It destroys the buffer and the buffer pointer. However, it will not close the the client connection | [
"Closes",
"the",
"stream",
"to",
"output",
".",
"It",
"destroys",
"the",
"buffer",
"and",
"the",
"buffer",
"pointer",
".",
"However",
"it",
"will",
"not",
"close",
"the",
"the",
"client",
"connection"
] | c3c785e3c6faf6348494b742669cd8025659f763 | https://github.com/salimm/httpoutpustream/blob/c3c785e3c6faf6348494b742669cd8025659f763/httpoutputstream/stream.py#L106-L121 |
249,403 | cfobel/clutter-webcam-viewer | clutter_webcam_viewer/warp_control.py | WarpControl.create_ui | def create_ui(self):
'''
Create UI elements and connect signals.
'''
box = Gtk.Box()
rotate_left = Gtk.Button('Rotate left')
rotate_right = Gtk.Button('Rotate right')
flip_horizontal = Gtk.Button('Flip horizontal')
flip_vertical = Gtk.Button('Flip vertical')
reset = Gtk.Button('Reset')
load = Gtk.Button('Load...')
save = Gtk.Button('Save...')
rotate_left.connect('clicked', lambda *args: self.rotate_left())
rotate_right.connect('clicked', lambda *args: self.rotate_right())
flip_horizontal.connect('clicked', lambda *args:
self.flip_horizontal())
flip_vertical.connect('clicked', lambda *args: self.flip_vertical())
reset.connect('clicked', lambda *args: self.reset())
load.connect('clicked', lambda *args: GObject.idle_add(self.load))
save.connect('clicked', lambda *args: GObject.idle_add(self.save))
for b in (rotate_left, rotate_right, flip_horizontal, flip_vertical,
reset, load, save):
box.pack_start(b, False, False, 0)
box.show_all()
self.widget.pack_start(box, False, False, 0)
if self.warp_actor.parent_corners is None:
for b in (rotate_left, rotate_right, flip_horizontal,
flip_vertical, reset, load, save):
b.set_sensitive(False)
def check_init():
if self.warp_actor.parent_corners is not None:
for b in (rotate_left, rotate_right, flip_horizontal,
flip_vertical, reset, load, save):
b.set_sensitive(True)
return False
return True
GObject.timeout_add(100, check_init) | python | def create_ui(self):
'''
Create UI elements and connect signals.
'''
box = Gtk.Box()
rotate_left = Gtk.Button('Rotate left')
rotate_right = Gtk.Button('Rotate right')
flip_horizontal = Gtk.Button('Flip horizontal')
flip_vertical = Gtk.Button('Flip vertical')
reset = Gtk.Button('Reset')
load = Gtk.Button('Load...')
save = Gtk.Button('Save...')
rotate_left.connect('clicked', lambda *args: self.rotate_left())
rotate_right.connect('clicked', lambda *args: self.rotate_right())
flip_horizontal.connect('clicked', lambda *args:
self.flip_horizontal())
flip_vertical.connect('clicked', lambda *args: self.flip_vertical())
reset.connect('clicked', lambda *args: self.reset())
load.connect('clicked', lambda *args: GObject.idle_add(self.load))
save.connect('clicked', lambda *args: GObject.idle_add(self.save))
for b in (rotate_left, rotate_right, flip_horizontal, flip_vertical,
reset, load, save):
box.pack_start(b, False, False, 0)
box.show_all()
self.widget.pack_start(box, False, False, 0)
if self.warp_actor.parent_corners is None:
for b in (rotate_left, rotate_right, flip_horizontal,
flip_vertical, reset, load, save):
b.set_sensitive(False)
def check_init():
if self.warp_actor.parent_corners is not None:
for b in (rotate_left, rotate_right, flip_horizontal,
flip_vertical, reset, load, save):
b.set_sensitive(True)
return False
return True
GObject.timeout_add(100, check_init) | [
"def",
"create_ui",
"(",
"self",
")",
":",
"box",
"=",
"Gtk",
".",
"Box",
"(",
")",
"rotate_left",
"=",
"Gtk",
".",
"Button",
"(",
"'Rotate left'",
")",
"rotate_right",
"=",
"Gtk",
".",
"Button",
"(",
"'Rotate right'",
")",
"flip_horizontal",
"=",
"Gtk",
".",
"Button",
"(",
"'Flip horizontal'",
")",
"flip_vertical",
"=",
"Gtk",
".",
"Button",
"(",
"'Flip vertical'",
")",
"reset",
"=",
"Gtk",
".",
"Button",
"(",
"'Reset'",
")",
"load",
"=",
"Gtk",
".",
"Button",
"(",
"'Load...'",
")",
"save",
"=",
"Gtk",
".",
"Button",
"(",
"'Save...'",
")",
"rotate_left",
".",
"connect",
"(",
"'clicked'",
",",
"lambda",
"*",
"args",
":",
"self",
".",
"rotate_left",
"(",
")",
")",
"rotate_right",
".",
"connect",
"(",
"'clicked'",
",",
"lambda",
"*",
"args",
":",
"self",
".",
"rotate_right",
"(",
")",
")",
"flip_horizontal",
".",
"connect",
"(",
"'clicked'",
",",
"lambda",
"*",
"args",
":",
"self",
".",
"flip_horizontal",
"(",
")",
")",
"flip_vertical",
".",
"connect",
"(",
"'clicked'",
",",
"lambda",
"*",
"args",
":",
"self",
".",
"flip_vertical",
"(",
")",
")",
"reset",
".",
"connect",
"(",
"'clicked'",
",",
"lambda",
"*",
"args",
":",
"self",
".",
"reset",
"(",
")",
")",
"load",
".",
"connect",
"(",
"'clicked'",
",",
"lambda",
"*",
"args",
":",
"GObject",
".",
"idle_add",
"(",
"self",
".",
"load",
")",
")",
"save",
".",
"connect",
"(",
"'clicked'",
",",
"lambda",
"*",
"args",
":",
"GObject",
".",
"idle_add",
"(",
"self",
".",
"save",
")",
")",
"for",
"b",
"in",
"(",
"rotate_left",
",",
"rotate_right",
",",
"flip_horizontal",
",",
"flip_vertical",
",",
"reset",
",",
"load",
",",
"save",
")",
":",
"box",
".",
"pack_start",
"(",
"b",
",",
"False",
",",
"False",
",",
"0",
")",
"box",
".",
"show_all",
"(",
")",
"self",
".",
"widget",
".",
"pack_start",
"(",
"box",
",",
"False",
",",
"False",
",",
"0",
")",
"if",
"self",
".",
"warp_actor",
".",
"parent_corners",
"is",
"None",
":",
"for",
"b",
"in",
"(",
"rotate_left",
",",
"rotate_right",
",",
"flip_horizontal",
",",
"flip_vertical",
",",
"reset",
",",
"load",
",",
"save",
")",
":",
"b",
".",
"set_sensitive",
"(",
"False",
")",
"def",
"check_init",
"(",
")",
":",
"if",
"self",
".",
"warp_actor",
".",
"parent_corners",
"is",
"not",
"None",
":",
"for",
"b",
"in",
"(",
"rotate_left",
",",
"rotate_right",
",",
"flip_horizontal",
",",
"flip_vertical",
",",
"reset",
",",
"load",
",",
"save",
")",
":",
"b",
".",
"set_sensitive",
"(",
"True",
")",
"return",
"False",
"return",
"True",
"GObject",
".",
"timeout_add",
"(",
"100",
",",
"check_init",
")"
] | Create UI elements and connect signals. | [
"Create",
"UI",
"elements",
"and",
"connect",
"signals",
"."
] | b227d2ae02d750194e65c13bcf178550755c3afc | https://github.com/cfobel/clutter-webcam-viewer/blob/b227d2ae02d750194e65c13bcf178550755c3afc/clutter_webcam_viewer/warp_control.py#L15-L55 |
249,404 | cfobel/clutter-webcam-viewer | clutter_webcam_viewer/warp_control.py | WarpControl.save | def save(self):
'''
Save warp projection settings to HDF file.
'''
response = pu.open(title='Save perspective warp', patterns=['*.h5'])
if response is not None:
self.warp_actor.save(response) | python | def save(self):
'''
Save warp projection settings to HDF file.
'''
response = pu.open(title='Save perspective warp', patterns=['*.h5'])
if response is not None:
self.warp_actor.save(response) | [
"def",
"save",
"(",
"self",
")",
":",
"response",
"=",
"pu",
".",
"open",
"(",
"title",
"=",
"'Save perspective warp'",
",",
"patterns",
"=",
"[",
"'*.h5'",
"]",
")",
"if",
"response",
"is",
"not",
"None",
":",
"self",
".",
"warp_actor",
".",
"save",
"(",
"response",
")"
] | Save warp projection settings to HDF file. | [
"Save",
"warp",
"projection",
"settings",
"to",
"HDF",
"file",
"."
] | b227d2ae02d750194e65c13bcf178550755c3afc | https://github.com/cfobel/clutter-webcam-viewer/blob/b227d2ae02d750194e65c13bcf178550755c3afc/clutter_webcam_viewer/warp_control.py#L57-L63 |
249,405 | cfobel/clutter-webcam-viewer | clutter_webcam_viewer/warp_control.py | WarpControl.load | def load(self):
'''
Load warp projection settings from HDF file.
'''
response = pu.open(title='Load perspective warp', patterns=['*.h5'])
if response is not None:
self.warp_actor.load(response) | python | def load(self):
'''
Load warp projection settings from HDF file.
'''
response = pu.open(title='Load perspective warp', patterns=['*.h5'])
if response is not None:
self.warp_actor.load(response) | [
"def",
"load",
"(",
"self",
")",
":",
"response",
"=",
"pu",
".",
"open",
"(",
"title",
"=",
"'Load perspective warp'",
",",
"patterns",
"=",
"[",
"'*.h5'",
"]",
")",
"if",
"response",
"is",
"not",
"None",
":",
"self",
".",
"warp_actor",
".",
"load",
"(",
"response",
")"
] | Load warp projection settings from HDF file. | [
"Load",
"warp",
"projection",
"settings",
"from",
"HDF",
"file",
"."
] | b227d2ae02d750194e65c13bcf178550755c3afc | https://github.com/cfobel/clutter-webcam-viewer/blob/b227d2ae02d750194e65c13bcf178550755c3afc/clutter_webcam_viewer/warp_control.py#L65-L71 |
249,406 | tbobm/devscripts | devscripts/logs.py | simple_logger | def simple_logger(**kwargs):
"""
Creates a simple logger
:param str name: The logger's name ('api', 'back'...)
:param int base_level: Lowest level allowed to log (Default: DEBUG)
:param str log_format: Logging format used for STDOUT
(Default: logs.FORMAT)
:param bool should_stdout: Allows to log to stdout (Default: True)
:param int stdout_level: Lowest level allowed to log to STDOUT
(Default: DEBUG)
:param bool should_http: Allows to log to HTTP server
:param int http_level: Lowest level allowed to log to the HTTP server
(Has to be superior or equals to base_level)
:param str http_host: Address of the HTTP Server
:param str http_url: Url of the HTTP Server
"""
# Args
logger_name = kwargs.get('name')
base_level = kwargs.get('base_level', logging.DEBUG)
should_stdout = kwargs.get('should_stdout', True)
should_http = kwargs.get('should_http', False)
# Generate base logger
logger = logging.getLogger(logger_name)
logger.setLevel(base_level)
# Define stdout handler
if should_stdout:
logger.addHandler(_add_stream_handler(**kwargs))
if should_http:
logger.addHandler(_add_http_handler(**kwargs))
return logger | python | def simple_logger(**kwargs):
"""
Creates a simple logger
:param str name: The logger's name ('api', 'back'...)
:param int base_level: Lowest level allowed to log (Default: DEBUG)
:param str log_format: Logging format used for STDOUT
(Default: logs.FORMAT)
:param bool should_stdout: Allows to log to stdout (Default: True)
:param int stdout_level: Lowest level allowed to log to STDOUT
(Default: DEBUG)
:param bool should_http: Allows to log to HTTP server
:param int http_level: Lowest level allowed to log to the HTTP server
(Has to be superior or equals to base_level)
:param str http_host: Address of the HTTP Server
:param str http_url: Url of the HTTP Server
"""
# Args
logger_name = kwargs.get('name')
base_level = kwargs.get('base_level', logging.DEBUG)
should_stdout = kwargs.get('should_stdout', True)
should_http = kwargs.get('should_http', False)
# Generate base logger
logger = logging.getLogger(logger_name)
logger.setLevel(base_level)
# Define stdout handler
if should_stdout:
logger.addHandler(_add_stream_handler(**kwargs))
if should_http:
logger.addHandler(_add_http_handler(**kwargs))
return logger | [
"def",
"simple_logger",
"(",
"*",
"*",
"kwargs",
")",
":",
"# Args",
"logger_name",
"=",
"kwargs",
".",
"get",
"(",
"'name'",
")",
"base_level",
"=",
"kwargs",
".",
"get",
"(",
"'base_level'",
",",
"logging",
".",
"DEBUG",
")",
"should_stdout",
"=",
"kwargs",
".",
"get",
"(",
"'should_stdout'",
",",
"True",
")",
"should_http",
"=",
"kwargs",
".",
"get",
"(",
"'should_http'",
",",
"False",
")",
"# Generate base logger",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"logger_name",
")",
"logger",
".",
"setLevel",
"(",
"base_level",
")",
"# Define stdout handler",
"if",
"should_stdout",
":",
"logger",
".",
"addHandler",
"(",
"_add_stream_handler",
"(",
"*",
"*",
"kwargs",
")",
")",
"if",
"should_http",
":",
"logger",
".",
"addHandler",
"(",
"_add_http_handler",
"(",
"*",
"*",
"kwargs",
")",
")",
"return",
"logger"
] | Creates a simple logger
:param str name: The logger's name ('api', 'back'...)
:param int base_level: Lowest level allowed to log (Default: DEBUG)
:param str log_format: Logging format used for STDOUT
(Default: logs.FORMAT)
:param bool should_stdout: Allows to log to stdout (Default: True)
:param int stdout_level: Lowest level allowed to log to STDOUT
(Default: DEBUG)
:param bool should_http: Allows to log to HTTP server
:param int http_level: Lowest level allowed to log to the HTTP server
(Has to be superior or equals to base_level)
:param str http_host: Address of the HTTP Server
:param str http_url: Url of the HTTP Server | [
"Creates",
"a",
"simple",
"logger"
] | beb23371ba80739afb5474766e8049ead3837925 | https://github.com/tbobm/devscripts/blob/beb23371ba80739afb5474766e8049ead3837925/devscripts/logs.py#L12-L49 |
249,407 | PSU-OIT-ARC/elasticmodels | elasticmodels/analysis.py | compare_dicts | def compare_dicts(d1, d2):
"""
Returns a diff string of the two dicts.
"""
a = json.dumps(d1, indent=4, sort_keys=True)
b = json.dumps(d2, indent=4, sort_keys=True)
# stolen from cpython
# https://github.com/python/cpython/blob/01fd68752e2d2d0a5f90ae8944ca35df0a5ddeaa/Lib/unittest/case.py#L1091
diff = ('\n' + '\n'.join(difflib.ndiff(
a.splitlines(),
b.splitlines())))
return diff | python | def compare_dicts(d1, d2):
"""
Returns a diff string of the two dicts.
"""
a = json.dumps(d1, indent=4, sort_keys=True)
b = json.dumps(d2, indent=4, sort_keys=True)
# stolen from cpython
# https://github.com/python/cpython/blob/01fd68752e2d2d0a5f90ae8944ca35df0a5ddeaa/Lib/unittest/case.py#L1091
diff = ('\n' + '\n'.join(difflib.ndiff(
a.splitlines(),
b.splitlines())))
return diff | [
"def",
"compare_dicts",
"(",
"d1",
",",
"d2",
")",
":",
"a",
"=",
"json",
".",
"dumps",
"(",
"d1",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
")",
"b",
"=",
"json",
".",
"dumps",
"(",
"d2",
",",
"indent",
"=",
"4",
",",
"sort_keys",
"=",
"True",
")",
"# stolen from cpython",
"# https://github.com/python/cpython/blob/01fd68752e2d2d0a5f90ae8944ca35df0a5ddeaa/Lib/unittest/case.py#L1091",
"diff",
"=",
"(",
"'\\n'",
"+",
"'\\n'",
".",
"join",
"(",
"difflib",
".",
"ndiff",
"(",
"a",
".",
"splitlines",
"(",
")",
",",
"b",
".",
"splitlines",
"(",
")",
")",
")",
")",
"return",
"diff"
] | Returns a diff string of the two dicts. | [
"Returns",
"a",
"diff",
"string",
"of",
"the",
"two",
"dicts",
"."
] | 67870508096f66123ef10b89789bbac06571cc80 | https://github.com/PSU-OIT-ARC/elasticmodels/blob/67870508096f66123ef10b89789bbac06571cc80/elasticmodels/analysis.py#L13-L24 |
249,408 | PSU-OIT-ARC/elasticmodels | elasticmodels/analysis.py | diff_analysis | def diff_analysis(using):
"""
Returns a diff string comparing the analysis defined in ES, with
the analysis defined in Python land for the connection `using`
"""
python_analysis = collect_analysis(using)
es_analysis = existing_analysis(using)
return compare_dicts(es_analysis, python_analysis) | python | def diff_analysis(using):
"""
Returns a diff string comparing the analysis defined in ES, with
the analysis defined in Python land for the connection `using`
"""
python_analysis = collect_analysis(using)
es_analysis = existing_analysis(using)
return compare_dicts(es_analysis, python_analysis) | [
"def",
"diff_analysis",
"(",
"using",
")",
":",
"python_analysis",
"=",
"collect_analysis",
"(",
"using",
")",
"es_analysis",
"=",
"existing_analysis",
"(",
"using",
")",
"return",
"compare_dicts",
"(",
"es_analysis",
",",
"python_analysis",
")"
] | Returns a diff string comparing the analysis defined in ES, with
the analysis defined in Python land for the connection `using` | [
"Returns",
"a",
"diff",
"string",
"comparing",
"the",
"analysis",
"defined",
"in",
"ES",
"with",
"the",
"analysis",
"defined",
"in",
"Python",
"land",
"for",
"the",
"connection",
"using"
] | 67870508096f66123ef10b89789bbac06571cc80 | https://github.com/PSU-OIT-ARC/elasticmodels/blob/67870508096f66123ef10b89789bbac06571cc80/elasticmodels/analysis.py#L42-L49 |
249,409 | PSU-OIT-ARC/elasticmodels | elasticmodels/analysis.py | collect_analysis | def collect_analysis(using):
"""
generate the analysis settings from Python land
"""
python_analysis = defaultdict(dict)
for index in registry.indexes_for_connection(using):
python_analysis.update(index._doc_type.mapping._collect_analysis())
return stringer(python_analysis) | python | def collect_analysis(using):
"""
generate the analysis settings from Python land
"""
python_analysis = defaultdict(dict)
for index in registry.indexes_for_connection(using):
python_analysis.update(index._doc_type.mapping._collect_analysis())
return stringer(python_analysis) | [
"def",
"collect_analysis",
"(",
"using",
")",
":",
"python_analysis",
"=",
"defaultdict",
"(",
"dict",
")",
"for",
"index",
"in",
"registry",
".",
"indexes_for_connection",
"(",
"using",
")",
":",
"python_analysis",
".",
"update",
"(",
"index",
".",
"_doc_type",
".",
"mapping",
".",
"_collect_analysis",
"(",
")",
")",
"return",
"stringer",
"(",
"python_analysis",
")"
] | generate the analysis settings from Python land | [
"generate",
"the",
"analysis",
"settings",
"from",
"Python",
"land"
] | 67870508096f66123ef10b89789bbac06571cc80 | https://github.com/PSU-OIT-ARC/elasticmodels/blob/67870508096f66123ef10b89789bbac06571cc80/elasticmodels/analysis.py#L52-L60 |
249,410 | PSU-OIT-ARC/elasticmodels | elasticmodels/analysis.py | existing_analysis | def existing_analysis(using):
"""
Get the existing analysis for the `using` Elasticsearch connection
"""
es = connections.get_connection(using)
index_name = settings.ELASTICSEARCH_CONNECTIONS[using]['index_name']
if es.indices.exists(index=index_name):
return stringer(es.indices.get_settings(index=index_name)[index_name]['settings']['index'].get('analysis', {}))
return DOES_NOT_EXIST | python | def existing_analysis(using):
"""
Get the existing analysis for the `using` Elasticsearch connection
"""
es = connections.get_connection(using)
index_name = settings.ELASTICSEARCH_CONNECTIONS[using]['index_name']
if es.indices.exists(index=index_name):
return stringer(es.indices.get_settings(index=index_name)[index_name]['settings']['index'].get('analysis', {}))
return DOES_NOT_EXIST | [
"def",
"existing_analysis",
"(",
"using",
")",
":",
"es",
"=",
"connections",
".",
"get_connection",
"(",
"using",
")",
"index_name",
"=",
"settings",
".",
"ELASTICSEARCH_CONNECTIONS",
"[",
"using",
"]",
"[",
"'index_name'",
"]",
"if",
"es",
".",
"indices",
".",
"exists",
"(",
"index",
"=",
"index_name",
")",
":",
"return",
"stringer",
"(",
"es",
".",
"indices",
".",
"get_settings",
"(",
"index",
"=",
"index_name",
")",
"[",
"index_name",
"]",
"[",
"'settings'",
"]",
"[",
"'index'",
"]",
".",
"get",
"(",
"'analysis'",
",",
"{",
"}",
")",
")",
"return",
"DOES_NOT_EXIST"
] | Get the existing analysis for the `using` Elasticsearch connection | [
"Get",
"the",
"existing",
"analysis",
"for",
"the",
"using",
"Elasticsearch",
"connection"
] | 67870508096f66123ef10b89789bbac06571cc80 | https://github.com/PSU-OIT-ARC/elasticmodels/blob/67870508096f66123ef10b89789bbac06571cc80/elasticmodels/analysis.py#L63-L71 |
249,411 | PSU-OIT-ARC/elasticmodels | elasticmodels/analysis.py | is_analysis_compatible | def is_analysis_compatible(using):
"""
Returns True if the analysis defined in Python land and ES for the connection `using` are compatible
"""
python_analysis = collect_analysis(using)
es_analysis = existing_analysis(using)
if es_analysis == DOES_NOT_EXIST:
return True
# we want to ensure everything defined in Python land is exactly matched in ES land
for section in python_analysis:
# there is an analysis section (analysis, tokenizers, filters, etc) defined in Python that isn't in ES
if section not in es_analysis:
return False
# for this section of analysis (analysis, tokenizer, filter, etc), get
# all the items defined in that section, and make sure they exist, and
# are equal in Python land
subdict_python = python_analysis[section]
subdict_es = es_analysis[section]
for name in subdict_python:
# this analyzer, filter, etc isn't defined in ES
if name not in subdict_es:
return False
# this analyzer, filter etc doesn't match what is in ES
if subdict_python[name] != subdict_es[name]:
return False
return True | python | def is_analysis_compatible(using):
"""
Returns True if the analysis defined in Python land and ES for the connection `using` are compatible
"""
python_analysis = collect_analysis(using)
es_analysis = existing_analysis(using)
if es_analysis == DOES_NOT_EXIST:
return True
# we want to ensure everything defined in Python land is exactly matched in ES land
for section in python_analysis:
# there is an analysis section (analysis, tokenizers, filters, etc) defined in Python that isn't in ES
if section not in es_analysis:
return False
# for this section of analysis (analysis, tokenizer, filter, etc), get
# all the items defined in that section, and make sure they exist, and
# are equal in Python land
subdict_python = python_analysis[section]
subdict_es = es_analysis[section]
for name in subdict_python:
# this analyzer, filter, etc isn't defined in ES
if name not in subdict_es:
return False
# this analyzer, filter etc doesn't match what is in ES
if subdict_python[name] != subdict_es[name]:
return False
return True | [
"def",
"is_analysis_compatible",
"(",
"using",
")",
":",
"python_analysis",
"=",
"collect_analysis",
"(",
"using",
")",
"es_analysis",
"=",
"existing_analysis",
"(",
"using",
")",
"if",
"es_analysis",
"==",
"DOES_NOT_EXIST",
":",
"return",
"True",
"# we want to ensure everything defined in Python land is exactly matched in ES land",
"for",
"section",
"in",
"python_analysis",
":",
"# there is an analysis section (analysis, tokenizers, filters, etc) defined in Python that isn't in ES",
"if",
"section",
"not",
"in",
"es_analysis",
":",
"return",
"False",
"# for this section of analysis (analysis, tokenizer, filter, etc), get",
"# all the items defined in that section, and make sure they exist, and",
"# are equal in Python land",
"subdict_python",
"=",
"python_analysis",
"[",
"section",
"]",
"subdict_es",
"=",
"es_analysis",
"[",
"section",
"]",
"for",
"name",
"in",
"subdict_python",
":",
"# this analyzer, filter, etc isn't defined in ES",
"if",
"name",
"not",
"in",
"subdict_es",
":",
"return",
"False",
"# this analyzer, filter etc doesn't match what is in ES",
"if",
"subdict_python",
"[",
"name",
"]",
"!=",
"subdict_es",
"[",
"name",
"]",
":",
"return",
"False",
"return",
"True"
] | Returns True if the analysis defined in Python land and ES for the connection `using` are compatible | [
"Returns",
"True",
"if",
"the",
"analysis",
"defined",
"in",
"Python",
"land",
"and",
"ES",
"for",
"the",
"connection",
"using",
"are",
"compatible"
] | 67870508096f66123ef10b89789bbac06571cc80 | https://github.com/PSU-OIT-ARC/elasticmodels/blob/67870508096f66123ef10b89789bbac06571cc80/elasticmodels/analysis.py#L74-L102 |
249,412 | PSU-OIT-ARC/elasticmodels | elasticmodels/analysis.py | combined_analysis | def combined_analysis(using):
"""
Combine the analysis in ES with the analysis defined in Python. The one in
Python takes precedence
"""
python_analysis = collect_analysis(using)
es_analysis = existing_analysis(using)
if es_analysis == DOES_NOT_EXIST:
return python_analysis
# we want to ensure everything defined in Python land is added, or
# overrides the things defined in ES
for section in python_analysis:
if section not in es_analysis:
es_analysis[section] = python_analysis[section]
subdict_python = python_analysis[section]
subdict_es = es_analysis[section]
for name in subdict_python:
subdict_es[name] = subdict_python[name]
return es_analysis | python | def combined_analysis(using):
"""
Combine the analysis in ES with the analysis defined in Python. The one in
Python takes precedence
"""
python_analysis = collect_analysis(using)
es_analysis = existing_analysis(using)
if es_analysis == DOES_NOT_EXIST:
return python_analysis
# we want to ensure everything defined in Python land is added, or
# overrides the things defined in ES
for section in python_analysis:
if section not in es_analysis:
es_analysis[section] = python_analysis[section]
subdict_python = python_analysis[section]
subdict_es = es_analysis[section]
for name in subdict_python:
subdict_es[name] = subdict_python[name]
return es_analysis | [
"def",
"combined_analysis",
"(",
"using",
")",
":",
"python_analysis",
"=",
"collect_analysis",
"(",
"using",
")",
"es_analysis",
"=",
"existing_analysis",
"(",
"using",
")",
"if",
"es_analysis",
"==",
"DOES_NOT_EXIST",
":",
"return",
"python_analysis",
"# we want to ensure everything defined in Python land is added, or",
"# overrides the things defined in ES",
"for",
"section",
"in",
"python_analysis",
":",
"if",
"section",
"not",
"in",
"es_analysis",
":",
"es_analysis",
"[",
"section",
"]",
"=",
"python_analysis",
"[",
"section",
"]",
"subdict_python",
"=",
"python_analysis",
"[",
"section",
"]",
"subdict_es",
"=",
"es_analysis",
"[",
"section",
"]",
"for",
"name",
"in",
"subdict_python",
":",
"subdict_es",
"[",
"name",
"]",
"=",
"subdict_python",
"[",
"name",
"]",
"return",
"es_analysis"
] | Combine the analysis in ES with the analysis defined in Python. The one in
Python takes precedence | [
"Combine",
"the",
"analysis",
"in",
"ES",
"with",
"the",
"analysis",
"defined",
"in",
"Python",
".",
"The",
"one",
"in",
"Python",
"takes",
"precedence"
] | 67870508096f66123ef10b89789bbac06571cc80 | https://github.com/PSU-OIT-ARC/elasticmodels/blob/67870508096f66123ef10b89789bbac06571cc80/elasticmodels/analysis.py#L105-L126 |
249,413 | xethorn/oto | oto/adaptors/flask.py | flaskify | def flaskify(response, headers=None, encoder=None):
"""Format the response to be consumeable by flask.
The api returns mostly JSON responses. The format method converts the dicts
into a json object (as a string), and the right response is returned (with
the valid mimetype, charset and status.)
Args:
response (Response): The dictionary object to convert into a json
object. If the value is a string, a dictionary is created with the
key "message".
headers (dict): optional headers for the flask response.
encoder (Class): The class of the encoder (if any).
Returns:
flask.Response: The flask response with formatted data, headers, and
mimetype.
"""
status_code = response.status
data = response.errors or response.message
mimetype = 'text/plain'
if isinstance(data, list) or isinstance(data, dict):
mimetype = 'application/json'
data = json.dumps(data, cls=encoder)
return flask.Response(
response=data, status=status_code, headers=headers, mimetype=mimetype) | python | def flaskify(response, headers=None, encoder=None):
"""Format the response to be consumeable by flask.
The api returns mostly JSON responses. The format method converts the dicts
into a json object (as a string), and the right response is returned (with
the valid mimetype, charset and status.)
Args:
response (Response): The dictionary object to convert into a json
object. If the value is a string, a dictionary is created with the
key "message".
headers (dict): optional headers for the flask response.
encoder (Class): The class of the encoder (if any).
Returns:
flask.Response: The flask response with formatted data, headers, and
mimetype.
"""
status_code = response.status
data = response.errors or response.message
mimetype = 'text/plain'
if isinstance(data, list) or isinstance(data, dict):
mimetype = 'application/json'
data = json.dumps(data, cls=encoder)
return flask.Response(
response=data, status=status_code, headers=headers, mimetype=mimetype) | [
"def",
"flaskify",
"(",
"response",
",",
"headers",
"=",
"None",
",",
"encoder",
"=",
"None",
")",
":",
"status_code",
"=",
"response",
".",
"status",
"data",
"=",
"response",
".",
"errors",
"or",
"response",
".",
"message",
"mimetype",
"=",
"'text/plain'",
"if",
"isinstance",
"(",
"data",
",",
"list",
")",
"or",
"isinstance",
"(",
"data",
",",
"dict",
")",
":",
"mimetype",
"=",
"'application/json'",
"data",
"=",
"json",
".",
"dumps",
"(",
"data",
",",
"cls",
"=",
"encoder",
")",
"return",
"flask",
".",
"Response",
"(",
"response",
"=",
"data",
",",
"status",
"=",
"status_code",
",",
"headers",
"=",
"headers",
",",
"mimetype",
"=",
"mimetype",
")"
] | Format the response to be consumeable by flask.
The api returns mostly JSON responses. The format method converts the dicts
into a json object (as a string), and the right response is returned (with
the valid mimetype, charset and status.)
Args:
response (Response): The dictionary object to convert into a json
object. If the value is a string, a dictionary is created with the
key "message".
headers (dict): optional headers for the flask response.
encoder (Class): The class of the encoder (if any).
Returns:
flask.Response: The flask response with formatted data, headers, and
mimetype. | [
"Format",
"the",
"response",
"to",
"be",
"consumeable",
"by",
"flask",
"."
] | 2a76d374ccc4c85fdf81ae1c43698a94c0594d7b | https://github.com/xethorn/oto/blob/2a76d374ccc4c85fdf81ae1c43698a94c0594d7b/oto/adaptors/flask.py#L6-L34 |
249,414 | hitchtest/hitchserve | hitchserve/service_handle.py | ServiceHandle.stop | def stop(self):
"""Ask politely, first, with SIGINT and SIGQUIT."""
if hasattr(self, 'process'):
if self.process is not None:
try:
is_running = self.process.poll() is None
except AttributeError:
is_running = False
if is_running:
self.bundle_engine.logline("Stopping {0}".format(self.service.name))
self.term_signal_sent = True
# Politely ask all child processes to die first
try:
for childproc in psutil.Process(self.process.pid).children(recursive=True):
childproc.send_signal(signal.SIGINT)
except psutil.NoSuchProcess:
pass
except AttributeError:
pass
try:
self.process.send_signal(self.service.stop_signal)
except OSError as e:
if e.errno == 3: # No such process
pass
else:
self.bundle_engine.warnline("{0} stopped prematurely.".format(self.service.name))
else:
self.bundle_engine.warnline("{0} stopped prematurely.".format(self.service.name))
else:
self.bundle_engine.warnline("{0} was never successfully started.".format(self.service.name)) | python | def stop(self):
"""Ask politely, first, with SIGINT and SIGQUIT."""
if hasattr(self, 'process'):
if self.process is not None:
try:
is_running = self.process.poll() is None
except AttributeError:
is_running = False
if is_running:
self.bundle_engine.logline("Stopping {0}".format(self.service.name))
self.term_signal_sent = True
# Politely ask all child processes to die first
try:
for childproc in psutil.Process(self.process.pid).children(recursive=True):
childproc.send_signal(signal.SIGINT)
except psutil.NoSuchProcess:
pass
except AttributeError:
pass
try:
self.process.send_signal(self.service.stop_signal)
except OSError as e:
if e.errno == 3: # No such process
pass
else:
self.bundle_engine.warnline("{0} stopped prematurely.".format(self.service.name))
else:
self.bundle_engine.warnline("{0} stopped prematurely.".format(self.service.name))
else:
self.bundle_engine.warnline("{0} was never successfully started.".format(self.service.name)) | [
"def",
"stop",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'process'",
")",
":",
"if",
"self",
".",
"process",
"is",
"not",
"None",
":",
"try",
":",
"is_running",
"=",
"self",
".",
"process",
".",
"poll",
"(",
")",
"is",
"None",
"except",
"AttributeError",
":",
"is_running",
"=",
"False",
"if",
"is_running",
":",
"self",
".",
"bundle_engine",
".",
"logline",
"(",
"\"Stopping {0}\"",
".",
"format",
"(",
"self",
".",
"service",
".",
"name",
")",
")",
"self",
".",
"term_signal_sent",
"=",
"True",
"# Politely ask all child processes to die first",
"try",
":",
"for",
"childproc",
"in",
"psutil",
".",
"Process",
"(",
"self",
".",
"process",
".",
"pid",
")",
".",
"children",
"(",
"recursive",
"=",
"True",
")",
":",
"childproc",
".",
"send_signal",
"(",
"signal",
".",
"SIGINT",
")",
"except",
"psutil",
".",
"NoSuchProcess",
":",
"pass",
"except",
"AttributeError",
":",
"pass",
"try",
":",
"self",
".",
"process",
".",
"send_signal",
"(",
"self",
".",
"service",
".",
"stop_signal",
")",
"except",
"OSError",
"as",
"e",
":",
"if",
"e",
".",
"errno",
"==",
"3",
":",
"# No such process",
"pass",
"else",
":",
"self",
".",
"bundle_engine",
".",
"warnline",
"(",
"\"{0} stopped prematurely.\"",
".",
"format",
"(",
"self",
".",
"service",
".",
"name",
")",
")",
"else",
":",
"self",
".",
"bundle_engine",
".",
"warnline",
"(",
"\"{0} stopped prematurely.\"",
".",
"format",
"(",
"self",
".",
"service",
".",
"name",
")",
")",
"else",
":",
"self",
".",
"bundle_engine",
".",
"warnline",
"(",
"\"{0} was never successfully started.\"",
".",
"format",
"(",
"self",
".",
"service",
".",
"name",
")",
")"
] | Ask politely, first, with SIGINT and SIGQUIT. | [
"Ask",
"politely",
"first",
"with",
"SIGINT",
"and",
"SIGQUIT",
"."
] | a2def19979264186d283e76f7f0c88f3ed97f2e0 | https://github.com/hitchtest/hitchserve/blob/a2def19979264186d283e76f7f0c88f3ed97f2e0/hitchserve/service_handle.py#L101-L133 |
249,415 | hitchtest/hitchserve | hitchserve/service_handle.py | ServiceHandle.kill | def kill(self):
"""Murder the children of this service in front of it, and then murder the service itself."""
if not self.is_dead():
self.bundle_engine.warnline("{0} did not shut down cleanly, killing.".format(self.service.name))
try:
if hasattr(self.process, 'pid'):
for child in psutil.Process(self.process.pid).children(recursive=True):
os.kill(child.pid, signal.SIGKILL)
self.process.kill()
except psutil.NoSuchProcess:
pass | python | def kill(self):
"""Murder the children of this service in front of it, and then murder the service itself."""
if not self.is_dead():
self.bundle_engine.warnline("{0} did not shut down cleanly, killing.".format(self.service.name))
try:
if hasattr(self.process, 'pid'):
for child in psutil.Process(self.process.pid).children(recursive=True):
os.kill(child.pid, signal.SIGKILL)
self.process.kill()
except psutil.NoSuchProcess:
pass | [
"def",
"kill",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"is_dead",
"(",
")",
":",
"self",
".",
"bundle_engine",
".",
"warnline",
"(",
"\"{0} did not shut down cleanly, killing.\"",
".",
"format",
"(",
"self",
".",
"service",
".",
"name",
")",
")",
"try",
":",
"if",
"hasattr",
"(",
"self",
".",
"process",
",",
"'pid'",
")",
":",
"for",
"child",
"in",
"psutil",
".",
"Process",
"(",
"self",
".",
"process",
".",
"pid",
")",
".",
"children",
"(",
"recursive",
"=",
"True",
")",
":",
"os",
".",
"kill",
"(",
"child",
".",
"pid",
",",
"signal",
".",
"SIGKILL",
")",
"self",
".",
"process",
".",
"kill",
"(",
")",
"except",
"psutil",
".",
"NoSuchProcess",
":",
"pass"
] | Murder the children of this service in front of it, and then murder the service itself. | [
"Murder",
"the",
"children",
"of",
"this",
"service",
"in",
"front",
"of",
"it",
"and",
"then",
"murder",
"the",
"service",
"itself",
"."
] | a2def19979264186d283e76f7f0c88f3ed97f2e0 | https://github.com/hitchtest/hitchserve/blob/a2def19979264186d283e76f7f0c88f3ed97f2e0/hitchserve/service_handle.py#L144-L154 |
249,416 | abe-winter/pg13-py | pg13/pgmock.py | TablesDict.cascade_delete | def cascade_delete(self, name):
"this fails under diamond inheritance"
for child in self[name].child_tables:
self.cascade_delete(child.name)
del self[name] | python | def cascade_delete(self, name):
"this fails under diamond inheritance"
for child in self[name].child_tables:
self.cascade_delete(child.name)
del self[name] | [
"def",
"cascade_delete",
"(",
"self",
",",
"name",
")",
":",
"for",
"child",
"in",
"self",
"[",
"name",
"]",
".",
"child_tables",
":",
"self",
".",
"cascade_delete",
"(",
"child",
".",
"name",
")",
"del",
"self",
"[",
"name",
"]"
] | this fails under diamond inheritance | [
"this",
"fails",
"under",
"diamond",
"inheritance"
] | c78806f99f35541a8756987e86edca3438aa97f5 | https://github.com/abe-winter/pg13-py/blob/c78806f99f35541a8756987e86edca3438aa97f5/pg13/pgmock.py#L66-L70 |
249,417 | abe-winter/pg13-py | pg13/pgmock.py | TablesDict.create | def create(self, ex):
"helper for apply_sql in CreateX case"
if ex.name in self:
if ex.nexists: return
raise ValueError('table_exists',ex.name)
if any(c.pkey for c in ex.cols):
if ex.pkey:
raise sqparse2.SQLSyntaxError("don't mix table-level and column-level pkeys",ex)
# todo(spec): is multi pkey permitted when defined per column?
ex.pkey = sqparse2.PKeyX([c.name for c in ex.cols if c.pkey])
if ex.inherits:
# todo: what if child table specifies constraints etc? this needs work.
if len(ex.inherits) > 1: raise NotImplementedError('todo: multi-table inherit')
parent = self[ex.inherits[0]] = copy.deepcopy(self[ex.inherits[0]]) # copy so rollback works
child = self[ex.name] = table.Table(ex.name, parent.fields, parent.pkey)
parent.child_tables.append(child)
child.parent_table = parent
else:
self[ex.name]=table.Table(ex.name,ex.cols,ex.pkey.fields if ex.pkey else []) | python | def create(self, ex):
"helper for apply_sql in CreateX case"
if ex.name in self:
if ex.nexists: return
raise ValueError('table_exists',ex.name)
if any(c.pkey for c in ex.cols):
if ex.pkey:
raise sqparse2.SQLSyntaxError("don't mix table-level and column-level pkeys",ex)
# todo(spec): is multi pkey permitted when defined per column?
ex.pkey = sqparse2.PKeyX([c.name for c in ex.cols if c.pkey])
if ex.inherits:
# todo: what if child table specifies constraints etc? this needs work.
if len(ex.inherits) > 1: raise NotImplementedError('todo: multi-table inherit')
parent = self[ex.inherits[0]] = copy.deepcopy(self[ex.inherits[0]]) # copy so rollback works
child = self[ex.name] = table.Table(ex.name, parent.fields, parent.pkey)
parent.child_tables.append(child)
child.parent_table = parent
else:
self[ex.name]=table.Table(ex.name,ex.cols,ex.pkey.fields if ex.pkey else []) | [
"def",
"create",
"(",
"self",
",",
"ex",
")",
":",
"if",
"ex",
".",
"name",
"in",
"self",
":",
"if",
"ex",
".",
"nexists",
":",
"return",
"raise",
"ValueError",
"(",
"'table_exists'",
",",
"ex",
".",
"name",
")",
"if",
"any",
"(",
"c",
".",
"pkey",
"for",
"c",
"in",
"ex",
".",
"cols",
")",
":",
"if",
"ex",
".",
"pkey",
":",
"raise",
"sqparse2",
".",
"SQLSyntaxError",
"(",
"\"don't mix table-level and column-level pkeys\"",
",",
"ex",
")",
"# todo(spec): is multi pkey permitted when defined per column?\r",
"ex",
".",
"pkey",
"=",
"sqparse2",
".",
"PKeyX",
"(",
"[",
"c",
".",
"name",
"for",
"c",
"in",
"ex",
".",
"cols",
"if",
"c",
".",
"pkey",
"]",
")",
"if",
"ex",
".",
"inherits",
":",
"# todo: what if child table specifies constraints etc? this needs work.\r",
"if",
"len",
"(",
"ex",
".",
"inherits",
")",
">",
"1",
":",
"raise",
"NotImplementedError",
"(",
"'todo: multi-table inherit'",
")",
"parent",
"=",
"self",
"[",
"ex",
".",
"inherits",
"[",
"0",
"]",
"]",
"=",
"copy",
".",
"deepcopy",
"(",
"self",
"[",
"ex",
".",
"inherits",
"[",
"0",
"]",
"]",
")",
"# copy so rollback works\r",
"child",
"=",
"self",
"[",
"ex",
".",
"name",
"]",
"=",
"table",
".",
"Table",
"(",
"ex",
".",
"name",
",",
"parent",
".",
"fields",
",",
"parent",
".",
"pkey",
")",
"parent",
".",
"child_tables",
".",
"append",
"(",
"child",
")",
"child",
".",
"parent_table",
"=",
"parent",
"else",
":",
"self",
"[",
"ex",
".",
"name",
"]",
"=",
"table",
".",
"Table",
"(",
"ex",
".",
"name",
",",
"ex",
".",
"cols",
",",
"ex",
".",
"pkey",
".",
"fields",
"if",
"ex",
".",
"pkey",
"else",
"[",
"]",
")"
] | helper for apply_sql in CreateX case | [
"helper",
"for",
"apply_sql",
"in",
"CreateX",
"case"
] | c78806f99f35541a8756987e86edca3438aa97f5 | https://github.com/abe-winter/pg13-py/blob/c78806f99f35541a8756987e86edca3438aa97f5/pg13/pgmock.py#L72-L90 |
249,418 | abe-winter/pg13-py | pg13/pgmock.py | TablesDict.drop | def drop(self, ex):
"helper for apply_sql in DropX case"
# todo: factor out inheritance logic (for readability)
if ex.name not in self:
if ex.ifexists: return
raise KeyError(ex.name)
table_ = self[ex.name]
parent = table_.parent_table
if table_.child_tables:
if not ex.cascade:
raise table.IntegrityError('delete_parent_without_cascade',ex.name)
self.cascade_delete(ex.name)
else: del self[ex.name]
if parent: parent.child_tables.remove(table_) | python | def drop(self, ex):
"helper for apply_sql in DropX case"
# todo: factor out inheritance logic (for readability)
if ex.name not in self:
if ex.ifexists: return
raise KeyError(ex.name)
table_ = self[ex.name]
parent = table_.parent_table
if table_.child_tables:
if not ex.cascade:
raise table.IntegrityError('delete_parent_without_cascade',ex.name)
self.cascade_delete(ex.name)
else: del self[ex.name]
if parent: parent.child_tables.remove(table_) | [
"def",
"drop",
"(",
"self",
",",
"ex",
")",
":",
"# todo: factor out inheritance logic (for readability)\r",
"if",
"ex",
".",
"name",
"not",
"in",
"self",
":",
"if",
"ex",
".",
"ifexists",
":",
"return",
"raise",
"KeyError",
"(",
"ex",
".",
"name",
")",
"table_",
"=",
"self",
"[",
"ex",
".",
"name",
"]",
"parent",
"=",
"table_",
".",
"parent_table",
"if",
"table_",
".",
"child_tables",
":",
"if",
"not",
"ex",
".",
"cascade",
":",
"raise",
"table",
".",
"IntegrityError",
"(",
"'delete_parent_without_cascade'",
",",
"ex",
".",
"name",
")",
"self",
".",
"cascade_delete",
"(",
"ex",
".",
"name",
")",
"else",
":",
"del",
"self",
"[",
"ex",
".",
"name",
"]",
"if",
"parent",
":",
"parent",
".",
"child_tables",
".",
"remove",
"(",
"table_",
")"
] | helper for apply_sql in DropX case | [
"helper",
"for",
"apply_sql",
"in",
"DropX",
"case"
] | c78806f99f35541a8756987e86edca3438aa97f5 | https://github.com/abe-winter/pg13-py/blob/c78806f99f35541a8756987e86edca3438aa97f5/pg13/pgmock.py#L92-L105 |
249,419 | amcfague/webunit2 | webunit2/framework.py | Framework._prepare_uri | def _prepare_uri(self, path, query_params={}):
"""
Prepares a full URI with the selected information.
``path``:
Path can be in one of two formats:
- If :attr:`server` was defined, the ``path`` will be appended
to the existing host, or
- an absolute URL
``query_params``:
Used to generate a query string, which will be appended to the end
of the absolute URL.
Returns an absolute URL.
"""
query_str = urllib.urlencode(query_params)
# If we have a relative path (as opposed to a full URL), build it of
# the connection info
if path.startswith('/') and self.server:
protocol = self.protocol
server = self.server
else:
protocol, server, path, _, _, _ = urlparse.urlparse(path)
assert server, "%s is not a valid URL" % path
return urlparse.urlunparse((
protocol, server, path, None, query_str, None)) | python | def _prepare_uri(self, path, query_params={}):
"""
Prepares a full URI with the selected information.
``path``:
Path can be in one of two formats:
- If :attr:`server` was defined, the ``path`` will be appended
to the existing host, or
- an absolute URL
``query_params``:
Used to generate a query string, which will be appended to the end
of the absolute URL.
Returns an absolute URL.
"""
query_str = urllib.urlencode(query_params)
# If we have a relative path (as opposed to a full URL), build it of
# the connection info
if path.startswith('/') and self.server:
protocol = self.protocol
server = self.server
else:
protocol, server, path, _, _, _ = urlparse.urlparse(path)
assert server, "%s is not a valid URL" % path
return urlparse.urlunparse((
protocol, server, path, None, query_str, None)) | [
"def",
"_prepare_uri",
"(",
"self",
",",
"path",
",",
"query_params",
"=",
"{",
"}",
")",
":",
"query_str",
"=",
"urllib",
".",
"urlencode",
"(",
"query_params",
")",
"# If we have a relative path (as opposed to a full URL), build it of",
"# the connection info",
"if",
"path",
".",
"startswith",
"(",
"'/'",
")",
"and",
"self",
".",
"server",
":",
"protocol",
"=",
"self",
".",
"protocol",
"server",
"=",
"self",
".",
"server",
"else",
":",
"protocol",
",",
"server",
",",
"path",
",",
"_",
",",
"_",
",",
"_",
"=",
"urlparse",
".",
"urlparse",
"(",
"path",
")",
"assert",
"server",
",",
"\"%s is not a valid URL\"",
"%",
"path",
"return",
"urlparse",
".",
"urlunparse",
"(",
"(",
"protocol",
",",
"server",
",",
"path",
",",
"None",
",",
"query_str",
",",
"None",
")",
")"
] | Prepares a full URI with the selected information.
``path``:
Path can be in one of two formats:
- If :attr:`server` was defined, the ``path`` will be appended
to the existing host, or
- an absolute URL
``query_params``:
Used to generate a query string, which will be appended to the end
of the absolute URL.
Returns an absolute URL. | [
"Prepares",
"a",
"full",
"URI",
"with",
"the",
"selected",
"information",
"."
] | 3157e5837aad0810800628c1383f1fe11ee3e513 | https://github.com/amcfague/webunit2/blob/3157e5837aad0810800628c1383f1fe11ee3e513/webunit2/framework.py#L69-L97 |
249,420 | amcfague/webunit2 | webunit2/framework.py | Framework.retrieve_page | def retrieve_page(self, method, path, post_params={}, headers={},
status=200, username=None, password=None,
*args, **kwargs):
"""
Makes the actual request. This will also go through and generate the
needed steps to make the request, i.e. basic auth.
``method``:
Any supported HTTP methods defined in :rfc:`2616`.
``path``:
Absolute or relative path. See :meth:`_prepare_uri` for more
detail.
``post_params``:
Dictionary of key/value pairs to be added as `POST` parameters.
``headers``:
Dictionary of key/value pairs to be added to the HTTP headers.
``status``:
Will error out if the HTTP status code does not match this value.
Set this to `None` to disable checking.
``username``, ``password``:
Username and password for basic auth; see
:meth:`_prepare_basicauth` for more detail.
An important note is that when ``post_params`` is specified, its
behavior depends on the ``method``. That is, for `PUT` and `POST`
requests, the dictionary is multipart encoded and put into the body of
the request. For everything else, it is added as a query string to the
URL.
"""
# Copy headers so that making changes here won't affect the original
headers = headers.copy()
# Update basic auth information
basicauth = self._prepare_basicauth(username, password)
if basicauth:
headers.update([basicauth])
# If this is a POST or PUT, we can put the data into the body as
# form-data encoded; otherwise, it should be part of the query string.
if method in ["PUT", "POST"]:
datagen, form_hdrs = poster.encode.multipart_encode(post_params)
body = "".join(datagen)
headers.update(form_hdrs)
uri = self._prepare_uri(path)
else:
body = ""
uri = self._prepare_uri(path, post_params)
# Make the actual request
response = self._make_request(uri, method, body, headers)
# Assert that the status we received was expected.
if status:
real_status = int(response.status_int)
assert real_status == int(status), \
"expected %s, received %s." % (status, real_status)
return response | python | def retrieve_page(self, method, path, post_params={}, headers={},
status=200, username=None, password=None,
*args, **kwargs):
"""
Makes the actual request. This will also go through and generate the
needed steps to make the request, i.e. basic auth.
``method``:
Any supported HTTP methods defined in :rfc:`2616`.
``path``:
Absolute or relative path. See :meth:`_prepare_uri` for more
detail.
``post_params``:
Dictionary of key/value pairs to be added as `POST` parameters.
``headers``:
Dictionary of key/value pairs to be added to the HTTP headers.
``status``:
Will error out if the HTTP status code does not match this value.
Set this to `None` to disable checking.
``username``, ``password``:
Username and password for basic auth; see
:meth:`_prepare_basicauth` for more detail.
An important note is that when ``post_params`` is specified, its
behavior depends on the ``method``. That is, for `PUT` and `POST`
requests, the dictionary is multipart encoded and put into the body of
the request. For everything else, it is added as a query string to the
URL.
"""
# Copy headers so that making changes here won't affect the original
headers = headers.copy()
# Update basic auth information
basicauth = self._prepare_basicauth(username, password)
if basicauth:
headers.update([basicauth])
# If this is a POST or PUT, we can put the data into the body as
# form-data encoded; otherwise, it should be part of the query string.
if method in ["PUT", "POST"]:
datagen, form_hdrs = poster.encode.multipart_encode(post_params)
body = "".join(datagen)
headers.update(form_hdrs)
uri = self._prepare_uri(path)
else:
body = ""
uri = self._prepare_uri(path, post_params)
# Make the actual request
response = self._make_request(uri, method, body, headers)
# Assert that the status we received was expected.
if status:
real_status = int(response.status_int)
assert real_status == int(status), \
"expected %s, received %s." % (status, real_status)
return response | [
"def",
"retrieve_page",
"(",
"self",
",",
"method",
",",
"path",
",",
"post_params",
"=",
"{",
"}",
",",
"headers",
"=",
"{",
"}",
",",
"status",
"=",
"200",
",",
"username",
"=",
"None",
",",
"password",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Copy headers so that making changes here won't affect the original",
"headers",
"=",
"headers",
".",
"copy",
"(",
")",
"# Update basic auth information",
"basicauth",
"=",
"self",
".",
"_prepare_basicauth",
"(",
"username",
",",
"password",
")",
"if",
"basicauth",
":",
"headers",
".",
"update",
"(",
"[",
"basicauth",
"]",
")",
"# If this is a POST or PUT, we can put the data into the body as",
"# form-data encoded; otherwise, it should be part of the query string.",
"if",
"method",
"in",
"[",
"\"PUT\"",
",",
"\"POST\"",
"]",
":",
"datagen",
",",
"form_hdrs",
"=",
"poster",
".",
"encode",
".",
"multipart_encode",
"(",
"post_params",
")",
"body",
"=",
"\"\"",
".",
"join",
"(",
"datagen",
")",
"headers",
".",
"update",
"(",
"form_hdrs",
")",
"uri",
"=",
"self",
".",
"_prepare_uri",
"(",
"path",
")",
"else",
":",
"body",
"=",
"\"\"",
"uri",
"=",
"self",
".",
"_prepare_uri",
"(",
"path",
",",
"post_params",
")",
"# Make the actual request",
"response",
"=",
"self",
".",
"_make_request",
"(",
"uri",
",",
"method",
",",
"body",
",",
"headers",
")",
"# Assert that the status we received was expected.",
"if",
"status",
":",
"real_status",
"=",
"int",
"(",
"response",
".",
"status_int",
")",
"assert",
"real_status",
"==",
"int",
"(",
"status",
")",
",",
"\"expected %s, received %s.\"",
"%",
"(",
"status",
",",
"real_status",
")",
"return",
"response"
] | Makes the actual request. This will also go through and generate the
needed steps to make the request, i.e. basic auth.
``method``:
Any supported HTTP methods defined in :rfc:`2616`.
``path``:
Absolute or relative path. See :meth:`_prepare_uri` for more
detail.
``post_params``:
Dictionary of key/value pairs to be added as `POST` parameters.
``headers``:
Dictionary of key/value pairs to be added to the HTTP headers.
``status``:
Will error out if the HTTP status code does not match this value.
Set this to `None` to disable checking.
``username``, ``password``:
Username and password for basic auth; see
:meth:`_prepare_basicauth` for more detail.
An important note is that when ``post_params`` is specified, its
behavior depends on the ``method``. That is, for `PUT` and `POST`
requests, the dictionary is multipart encoded and put into the body of
the request. For everything else, it is added as a query string to the
URL. | [
"Makes",
"the",
"actual",
"request",
".",
"This",
"will",
"also",
"go",
"through",
"and",
"generate",
"the",
"needed",
"steps",
"to",
"make",
"the",
"request",
"i",
".",
"e",
".",
"basic",
"auth",
"."
] | 3157e5837aad0810800628c1383f1fe11ee3e513 | https://github.com/amcfague/webunit2/blob/3157e5837aad0810800628c1383f1fe11ee3e513/webunit2/framework.py#L122-L179 |
249,421 | ojake/django-tracked-model | tracked_model/control.py | create_track_token | def create_track_token(request):
"""Returns ``TrackToken``.
``TrackToken' contains request and user making changes.
It can be passed to ``TrackedModel.save`` instead of ``request``.
It is intended to be used when passing ``request`` is not possible
e.g. when ``TrackedModel.save`` will be called from celery task.
"""
from tracked_model.models import RequestInfo
request_pk = RequestInfo.create_or_get_from_request(request).pk
user_pk = None
if request.user.is_authenticated():
user_pk = request.user.pk
return TrackToken(request_pk=request_pk, user_pk=user_pk) | python | def create_track_token(request):
"""Returns ``TrackToken``.
``TrackToken' contains request and user making changes.
It can be passed to ``TrackedModel.save`` instead of ``request``.
It is intended to be used when passing ``request`` is not possible
e.g. when ``TrackedModel.save`` will be called from celery task.
"""
from tracked_model.models import RequestInfo
request_pk = RequestInfo.create_or_get_from_request(request).pk
user_pk = None
if request.user.is_authenticated():
user_pk = request.user.pk
return TrackToken(request_pk=request_pk, user_pk=user_pk) | [
"def",
"create_track_token",
"(",
"request",
")",
":",
"from",
"tracked_model",
".",
"models",
"import",
"RequestInfo",
"request_pk",
"=",
"RequestInfo",
".",
"create_or_get_from_request",
"(",
"request",
")",
".",
"pk",
"user_pk",
"=",
"None",
"if",
"request",
".",
"user",
".",
"is_authenticated",
"(",
")",
":",
"user_pk",
"=",
"request",
".",
"user",
".",
"pk",
"return",
"TrackToken",
"(",
"request_pk",
"=",
"request_pk",
",",
"user_pk",
"=",
"user_pk",
")"
] | Returns ``TrackToken``.
``TrackToken' contains request and user making changes.
It can be passed to ``TrackedModel.save`` instead of ``request``.
It is intended to be used when passing ``request`` is not possible
e.g. when ``TrackedModel.save`` will be called from celery task. | [
"Returns",
"TrackToken",
".",
"TrackToken",
"contains",
"request",
"and",
"user",
"making",
"changes",
"."
] | 19bc48874dd2e5fb5defedc6b8c5c3915cce1424 | https://github.com/ojake/django-tracked-model/blob/19bc48874dd2e5fb5defedc6b8c5c3915cce1424/tracked_model/control.py#L7-L21 |
249,422 | ojake/django-tracked-model | tracked_model/control.py | TrackedModelMixin.save | def save(self, *args, **kwargs):
"""Saves changes made on model instance if ``request`` or
``track_token`` keyword are provided.
"""
from tracked_model.models import History, RequestInfo
if self.pk:
action = ActionType.UPDATE
changes = None
else:
action = ActionType.CREATE
changes = serializer.dump_model(self)
request = kwargs.pop('request', None)
track_token = kwargs.pop('track_token', None)
super().save(*args, **kwargs)
if not changes:
changes = self._tracked_model_diff()
if changes:
hist = History()
hist.model_name = self._meta.model.__name__
hist.app_label = self._meta.app_label
hist.table_name = self._meta.db_table
hist.table_id = self.pk
hist.change_log = serializer.to_json(changes)
hist.action_type = action
if request:
if request.user.is_authenticated():
hist.revision_author = request.user
req_info = RequestInfo.create_or_get_from_request(request)
hist.revision_request = req_info
elif track_token:
hist.revision_author_id = track_token.user_pk
hist.revision_request_id = track_token.request_pk
hist.save()
self._tracked_model_initial_state = serializer.dump_model(self) | python | def save(self, *args, **kwargs):
"""Saves changes made on model instance if ``request`` or
``track_token`` keyword are provided.
"""
from tracked_model.models import History, RequestInfo
if self.pk:
action = ActionType.UPDATE
changes = None
else:
action = ActionType.CREATE
changes = serializer.dump_model(self)
request = kwargs.pop('request', None)
track_token = kwargs.pop('track_token', None)
super().save(*args, **kwargs)
if not changes:
changes = self._tracked_model_diff()
if changes:
hist = History()
hist.model_name = self._meta.model.__name__
hist.app_label = self._meta.app_label
hist.table_name = self._meta.db_table
hist.table_id = self.pk
hist.change_log = serializer.to_json(changes)
hist.action_type = action
if request:
if request.user.is_authenticated():
hist.revision_author = request.user
req_info = RequestInfo.create_or_get_from_request(request)
hist.revision_request = req_info
elif track_token:
hist.revision_author_id = track_token.user_pk
hist.revision_request_id = track_token.request_pk
hist.save()
self._tracked_model_initial_state = serializer.dump_model(self) | [
"def",
"save",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"tracked_model",
".",
"models",
"import",
"History",
",",
"RequestInfo",
"if",
"self",
".",
"pk",
":",
"action",
"=",
"ActionType",
".",
"UPDATE",
"changes",
"=",
"None",
"else",
":",
"action",
"=",
"ActionType",
".",
"CREATE",
"changes",
"=",
"serializer",
".",
"dump_model",
"(",
"self",
")",
"request",
"=",
"kwargs",
".",
"pop",
"(",
"'request'",
",",
"None",
")",
"track_token",
"=",
"kwargs",
".",
"pop",
"(",
"'track_token'",
",",
"None",
")",
"super",
"(",
")",
".",
"save",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"not",
"changes",
":",
"changes",
"=",
"self",
".",
"_tracked_model_diff",
"(",
")",
"if",
"changes",
":",
"hist",
"=",
"History",
"(",
")",
"hist",
".",
"model_name",
"=",
"self",
".",
"_meta",
".",
"model",
".",
"__name__",
"hist",
".",
"app_label",
"=",
"self",
".",
"_meta",
".",
"app_label",
"hist",
".",
"table_name",
"=",
"self",
".",
"_meta",
".",
"db_table",
"hist",
".",
"table_id",
"=",
"self",
".",
"pk",
"hist",
".",
"change_log",
"=",
"serializer",
".",
"to_json",
"(",
"changes",
")",
"hist",
".",
"action_type",
"=",
"action",
"if",
"request",
":",
"if",
"request",
".",
"user",
".",
"is_authenticated",
"(",
")",
":",
"hist",
".",
"revision_author",
"=",
"request",
".",
"user",
"req_info",
"=",
"RequestInfo",
".",
"create_or_get_from_request",
"(",
"request",
")",
"hist",
".",
"revision_request",
"=",
"req_info",
"elif",
"track_token",
":",
"hist",
".",
"revision_author_id",
"=",
"track_token",
".",
"user_pk",
"hist",
".",
"revision_request_id",
"=",
"track_token",
".",
"request_pk",
"hist",
".",
"save",
"(",
")",
"self",
".",
"_tracked_model_initial_state",
"=",
"serializer",
".",
"dump_model",
"(",
"self",
")"
] | Saves changes made on model instance if ``request`` or
``track_token`` keyword are provided. | [
"Saves",
"changes",
"made",
"on",
"model",
"instance",
"if",
"request",
"or",
"track_token",
"keyword",
"are",
"provided",
"."
] | 19bc48874dd2e5fb5defedc6b8c5c3915cce1424 | https://github.com/ojake/django-tracked-model/blob/19bc48874dd2e5fb5defedc6b8c5c3915cce1424/tracked_model/control.py#L38-L76 |
249,423 | ojake/django-tracked-model | tracked_model/control.py | TrackedModelMixin._tracked_model_diff | def _tracked_model_diff(self):
"""Returns changes made to model instance.
Returns None if no changes were made.
"""
initial_state = self._tracked_model_initial_state
current_state = serializer.dump_model(self)
if current_state == initial_state:
return None
change_log = {}
for field in initial_state:
old_value = initial_state[field][Field.VALUE]
new_value = current_state[field][Field.VALUE]
if old_value == new_value:
continue
field_data = initial_state.copy()[field]
del field_data[Field.VALUE]
field_data[Field.OLD] = old_value
field_data[Field.NEW] = new_value
change_log[field] = field_data
return change_log or None | python | def _tracked_model_diff(self):
"""Returns changes made to model instance.
Returns None if no changes were made.
"""
initial_state = self._tracked_model_initial_state
current_state = serializer.dump_model(self)
if current_state == initial_state:
return None
change_log = {}
for field in initial_state:
old_value = initial_state[field][Field.VALUE]
new_value = current_state[field][Field.VALUE]
if old_value == new_value:
continue
field_data = initial_state.copy()[field]
del field_data[Field.VALUE]
field_data[Field.OLD] = old_value
field_data[Field.NEW] = new_value
change_log[field] = field_data
return change_log or None | [
"def",
"_tracked_model_diff",
"(",
"self",
")",
":",
"initial_state",
"=",
"self",
".",
"_tracked_model_initial_state",
"current_state",
"=",
"serializer",
".",
"dump_model",
"(",
"self",
")",
"if",
"current_state",
"==",
"initial_state",
":",
"return",
"None",
"change_log",
"=",
"{",
"}",
"for",
"field",
"in",
"initial_state",
":",
"old_value",
"=",
"initial_state",
"[",
"field",
"]",
"[",
"Field",
".",
"VALUE",
"]",
"new_value",
"=",
"current_state",
"[",
"field",
"]",
"[",
"Field",
".",
"VALUE",
"]",
"if",
"old_value",
"==",
"new_value",
":",
"continue",
"field_data",
"=",
"initial_state",
".",
"copy",
"(",
")",
"[",
"field",
"]",
"del",
"field_data",
"[",
"Field",
".",
"VALUE",
"]",
"field_data",
"[",
"Field",
".",
"OLD",
"]",
"=",
"old_value",
"field_data",
"[",
"Field",
".",
"NEW",
"]",
"=",
"new_value",
"change_log",
"[",
"field",
"]",
"=",
"field_data",
"return",
"change_log",
"or",
"None"
] | Returns changes made to model instance.
Returns None if no changes were made. | [
"Returns",
"changes",
"made",
"to",
"model",
"instance",
".",
"Returns",
"None",
"if",
"no",
"changes",
"were",
"made",
"."
] | 19bc48874dd2e5fb5defedc6b8c5c3915cce1424 | https://github.com/ojake/django-tracked-model/blob/19bc48874dd2e5fb5defedc6b8c5c3915cce1424/tracked_model/control.py#L103-L124 |
249,424 | ojake/django-tracked-model | tracked_model/control.py | TrackedModelMixin.tracked_model_history | def tracked_model_history(self):
"""Returns history of a tracked object"""
from tracked_model.models import History
return History.objects.filter(
table_name=self._meta.db_table, table_id=self.pk) | python | def tracked_model_history(self):
"""Returns history of a tracked object"""
from tracked_model.models import History
return History.objects.filter(
table_name=self._meta.db_table, table_id=self.pk) | [
"def",
"tracked_model_history",
"(",
"self",
")",
":",
"from",
"tracked_model",
".",
"models",
"import",
"History",
"return",
"History",
".",
"objects",
".",
"filter",
"(",
"table_name",
"=",
"self",
".",
"_meta",
".",
"db_table",
",",
"table_id",
"=",
"self",
".",
"pk",
")"
] | Returns history of a tracked object | [
"Returns",
"history",
"of",
"a",
"tracked",
"object"
] | 19bc48874dd2e5fb5defedc6b8c5c3915cce1424 | https://github.com/ojake/django-tracked-model/blob/19bc48874dd2e5fb5defedc6b8c5c3915cce1424/tracked_model/control.py#L126-L130 |
249,425 | foliant-docs/foliantcontrib.init | foliant/cli/init/__init__.py | replace_placeholders | def replace_placeholders(path: Path, properties: Dict[str, str]):
'''Replace placeholders in a file with the values from the mapping.'''
with open(path, encoding='utf8') as file:
file_content = Template(file.read())
with open(path, 'w', encoding='utf8') as file:
file.write(file_content.safe_substitute(properties)) | python | def replace_placeholders(path: Path, properties: Dict[str, str]):
'''Replace placeholders in a file with the values from the mapping.'''
with open(path, encoding='utf8') as file:
file_content = Template(file.read())
with open(path, 'w', encoding='utf8') as file:
file.write(file_content.safe_substitute(properties)) | [
"def",
"replace_placeholders",
"(",
"path",
":",
"Path",
",",
"properties",
":",
"Dict",
"[",
"str",
",",
"str",
"]",
")",
":",
"with",
"open",
"(",
"path",
",",
"encoding",
"=",
"'utf8'",
")",
"as",
"file",
":",
"file_content",
"=",
"Template",
"(",
"file",
".",
"read",
"(",
")",
")",
"with",
"open",
"(",
"path",
",",
"'w'",
",",
"encoding",
"=",
"'utf8'",
")",
"as",
"file",
":",
"file",
".",
"write",
"(",
"file_content",
".",
"safe_substitute",
"(",
"properties",
")",
")"
] | Replace placeholders in a file with the values from the mapping. | [
"Replace",
"placeholders",
"in",
"a",
"file",
"with",
"the",
"values",
"from",
"the",
"mapping",
"."
] | 39aa38949b6270a750c800b79b4e71dd827f28d8 | https://github.com/foliant-docs/foliantcontrib.init/blob/39aa38949b6270a750c800b79b4e71dd827f28d8/foliant/cli/init/__init__.py#L41-L48 |
249,426 | foliant-docs/foliantcontrib.init | foliant/cli/init/__init__.py | BuiltinTemplateValidator.validate | def validate(self, document):
'''Check if the selected template exists.'''
template = document.text
if template not in self.builtin_templates:
raise ValidationError(
message=f'Template {template} not found. '
+ f'Available templates are: {", ".join(self.builtin_templates)}.',
cursor_position=0
) | python | def validate(self, document):
'''Check if the selected template exists.'''
template = document.text
if template not in self.builtin_templates:
raise ValidationError(
message=f'Template {template} not found. '
+ f'Available templates are: {", ".join(self.builtin_templates)}.',
cursor_position=0
) | [
"def",
"validate",
"(",
"self",
",",
"document",
")",
":",
"template",
"=",
"document",
".",
"text",
"if",
"template",
"not",
"in",
"self",
".",
"builtin_templates",
":",
"raise",
"ValidationError",
"(",
"message",
"=",
"f'Template {template} not found. '",
"+",
"f'Available templates are: {\", \".join(self.builtin_templates)}.'",
",",
"cursor_position",
"=",
"0",
")"
] | Check if the selected template exists. | [
"Check",
"if",
"the",
"selected",
"template",
"exists",
"."
] | 39aa38949b6270a750c800b79b4e71dd827f28d8 | https://github.com/foliant-docs/foliantcontrib.init/blob/39aa38949b6270a750c800b79b4e71dd827f28d8/foliant/cli/init/__init__.py#L28-L38 |
249,427 | aptiko/simpletail | simpletail/__init__.py | ropen.get_start_of_line | def get_start_of_line(self):
"""Return index of start of last line stored in self.buf.
This function never fetches more data from the file; therefore,
if it returns zero, meaning the line starts at the beginning of the
buffer, the caller should then fetch more data and retry.
"""
if self.newline in ('\r', '\n', '\r\n'):
return self.buf.rfind(self.newline.encode('ascii'), 0, -1) + 1
if self.newline:
raise ValueError(r"ropen newline argument must be one of "
r"None, '', '\r', '\n', '\r\n'.")
# self.newline is None or ''; universal newlines mode
end_of_search = -1
if len(self.buf) >= 2 and self.buf[-2:] == b'\r\n':
end_of_search = -2
return max(self.buf.rfind(b'\n', 0, end_of_search),
self.buf.rfind(b'\r', 0, end_of_search)) + 1 | python | def get_start_of_line(self):
"""Return index of start of last line stored in self.buf.
This function never fetches more data from the file; therefore,
if it returns zero, meaning the line starts at the beginning of the
buffer, the caller should then fetch more data and retry.
"""
if self.newline in ('\r', '\n', '\r\n'):
return self.buf.rfind(self.newline.encode('ascii'), 0, -1) + 1
if self.newline:
raise ValueError(r"ropen newline argument must be one of "
r"None, '', '\r', '\n', '\r\n'.")
# self.newline is None or ''; universal newlines mode
end_of_search = -1
if len(self.buf) >= 2 and self.buf[-2:] == b'\r\n':
end_of_search = -2
return max(self.buf.rfind(b'\n', 0, end_of_search),
self.buf.rfind(b'\r', 0, end_of_search)) + 1 | [
"def",
"get_start_of_line",
"(",
"self",
")",
":",
"if",
"self",
".",
"newline",
"in",
"(",
"'\\r'",
",",
"'\\n'",
",",
"'\\r\\n'",
")",
":",
"return",
"self",
".",
"buf",
".",
"rfind",
"(",
"self",
".",
"newline",
".",
"encode",
"(",
"'ascii'",
")",
",",
"0",
",",
"-",
"1",
")",
"+",
"1",
"if",
"self",
".",
"newline",
":",
"raise",
"ValueError",
"(",
"r\"ropen newline argument must be one of \"",
"r\"None, '', '\\r', '\\n', '\\r\\n'.\"",
")",
"# self.newline is None or ''; universal newlines mode",
"end_of_search",
"=",
"-",
"1",
"if",
"len",
"(",
"self",
".",
"buf",
")",
">=",
"2",
"and",
"self",
".",
"buf",
"[",
"-",
"2",
":",
"]",
"==",
"b'\\r\\n'",
":",
"end_of_search",
"=",
"-",
"2",
"return",
"max",
"(",
"self",
".",
"buf",
".",
"rfind",
"(",
"b'\\n'",
",",
"0",
",",
"end_of_search",
")",
",",
"self",
".",
"buf",
".",
"rfind",
"(",
"b'\\r'",
",",
"0",
",",
"end_of_search",
")",
")",
"+",
"1"
] | Return index of start of last line stored in self.buf.
This function never fetches more data from the file; therefore,
if it returns zero, meaning the line starts at the beginning of the
buffer, the caller should then fetch more data and retry. | [
"Return",
"index",
"of",
"start",
"of",
"last",
"line",
"stored",
"in",
"self",
".",
"buf",
".",
"This",
"function",
"never",
"fetches",
"more",
"data",
"from",
"the",
"file",
";",
"therefore",
"if",
"it",
"returns",
"zero",
"meaning",
"the",
"line",
"starts",
"at",
"the",
"beginning",
"of",
"the",
"buffer",
"the",
"caller",
"should",
"then",
"fetch",
"more",
"data",
"and",
"retry",
"."
] | 4ebe31950c9a3b7fac243dc83afd915894ce678f | https://github.com/aptiko/simpletail/blob/4ebe31950c9a3b7fac243dc83afd915894ce678f/simpletail/__init__.py#L59-L76 |
249,428 | aptiko/simpletail | simpletail/__init__.py | ropen.read_next_into_buf | def read_next_into_buf(self):
"""Read data from the file in self.bufsize chunks until we're
certain we have a full line in the buffer.
"""
file_pos = self.fileobject.tell()
if (file_pos == 0) and (self.buf == b''):
raise StopIteration
while file_pos and (self.get_start_of_line() == 0):
bytes_to_read = min(self.bufsize, file_pos)
file_pos = file_pos - bytes_to_read
self.fileobject.seek(file_pos)
new_stuff = self.fileobject.read(bytes_to_read)[:bytes_to_read]
self.fileobject.seek(file_pos)
self.buf = new_stuff + self.buf | python | def read_next_into_buf(self):
"""Read data from the file in self.bufsize chunks until we're
certain we have a full line in the buffer.
"""
file_pos = self.fileobject.tell()
if (file_pos == 0) and (self.buf == b''):
raise StopIteration
while file_pos and (self.get_start_of_line() == 0):
bytes_to_read = min(self.bufsize, file_pos)
file_pos = file_pos - bytes_to_read
self.fileobject.seek(file_pos)
new_stuff = self.fileobject.read(bytes_to_read)[:bytes_to_read]
self.fileobject.seek(file_pos)
self.buf = new_stuff + self.buf | [
"def",
"read_next_into_buf",
"(",
"self",
")",
":",
"file_pos",
"=",
"self",
".",
"fileobject",
".",
"tell",
"(",
")",
"if",
"(",
"file_pos",
"==",
"0",
")",
"and",
"(",
"self",
".",
"buf",
"==",
"b''",
")",
":",
"raise",
"StopIteration",
"while",
"file_pos",
"and",
"(",
"self",
".",
"get_start_of_line",
"(",
")",
"==",
"0",
")",
":",
"bytes_to_read",
"=",
"min",
"(",
"self",
".",
"bufsize",
",",
"file_pos",
")",
"file_pos",
"=",
"file_pos",
"-",
"bytes_to_read",
"self",
".",
"fileobject",
".",
"seek",
"(",
"file_pos",
")",
"new_stuff",
"=",
"self",
".",
"fileobject",
".",
"read",
"(",
"bytes_to_read",
")",
"[",
":",
"bytes_to_read",
"]",
"self",
".",
"fileobject",
".",
"seek",
"(",
"file_pos",
")",
"self",
".",
"buf",
"=",
"new_stuff",
"+",
"self",
".",
"buf"
] | Read data from the file in self.bufsize chunks until we're
certain we have a full line in the buffer. | [
"Read",
"data",
"from",
"the",
"file",
"in",
"self",
".",
"bufsize",
"chunks",
"until",
"we",
"re",
"certain",
"we",
"have",
"a",
"full",
"line",
"in",
"the",
"buffer",
"."
] | 4ebe31950c9a3b7fac243dc83afd915894ce678f | https://github.com/aptiko/simpletail/blob/4ebe31950c9a3b7fac243dc83afd915894ce678f/simpletail/__init__.py#L78-L91 |
249,429 | edeposit/edeposit.amqp.storage | src/edeposit/amqp/storage/tree_handler.py | TreeHandler._add_to | def _add_to(self, db, index, item, default=OOSet):
"""
Add `item` to `db` under `index`. If `index` is not yet in `db`, create
it using `default`.
Args:
db (dict-obj): Dict-like object used to connect to database.
index (str): Index used to look in `db`.
item (obj): Persistent object, which may be stored in DB.
default (func/obj): Reference to function/object, which will be
used to create the object under `index`.
Default :class:`OOSet`.
"""
row = db.get(index, None)
if row is None:
row = default()
db[index] = row
row.add(item) | python | def _add_to(self, db, index, item, default=OOSet):
"""
Add `item` to `db` under `index`. If `index` is not yet in `db`, create
it using `default`.
Args:
db (dict-obj): Dict-like object used to connect to database.
index (str): Index used to look in `db`.
item (obj): Persistent object, which may be stored in DB.
default (func/obj): Reference to function/object, which will be
used to create the object under `index`.
Default :class:`OOSet`.
"""
row = db.get(index, None)
if row is None:
row = default()
db[index] = row
row.add(item) | [
"def",
"_add_to",
"(",
"self",
",",
"db",
",",
"index",
",",
"item",
",",
"default",
"=",
"OOSet",
")",
":",
"row",
"=",
"db",
".",
"get",
"(",
"index",
",",
"None",
")",
"if",
"row",
"is",
"None",
":",
"row",
"=",
"default",
"(",
")",
"db",
"[",
"index",
"]",
"=",
"row",
"row",
".",
"add",
"(",
"item",
")"
] | Add `item` to `db` under `index`. If `index` is not yet in `db`, create
it using `default`.
Args:
db (dict-obj): Dict-like object used to connect to database.
index (str): Index used to look in `db`.
item (obj): Persistent object, which may be stored in DB.
default (func/obj): Reference to function/object, which will be
used to create the object under `index`.
Default :class:`OOSet`. | [
"Add",
"item",
"to",
"db",
"under",
"index",
".",
"If",
"index",
"is",
"not",
"yet",
"in",
"db",
"create",
"it",
"using",
"default",
"."
] | fb6bd326249847de04b17b64e856c878665cea92 | https://github.com/edeposit/edeposit.amqp.storage/blob/fb6bd326249847de04b17b64e856c878665cea92/src/edeposit/amqp/storage/tree_handler.py#L73-L92 |
249,430 | edeposit/edeposit.amqp.storage | src/edeposit/amqp/storage/tree_handler.py | TreeHandler.add_tree | def add_tree(self, tree, parent=None):
"""
Add `tree` into database.
Args:
tree (obj): :class:`.Tree` instance.
parent (ref, default None): Reference to parent tree. This is used
for all sub-trees in recursive call.
"""
if tree.path in self.path_db:
self.remove_tree_by_path(tree.path)
# index all indexable attributes
for index in tree.indexes:
if not getattr(tree, index):
continue
self._add_to(
getattr(self, index + "_db"),
getattr(tree, index),
tree,
)
if parent:
self._add_to(self.parent_db, tree.path, parent)
# make sure, that all sub-trees starts with path of parent tree
for sub_tree in tree.sub_trees:
assert sub_tree.path.startswith(tree.path)
for sub_tree in tree.sub_trees:
self.add_tree(sub_tree, parent=tree) | python | def add_tree(self, tree, parent=None):
"""
Add `tree` into database.
Args:
tree (obj): :class:`.Tree` instance.
parent (ref, default None): Reference to parent tree. This is used
for all sub-trees in recursive call.
"""
if tree.path in self.path_db:
self.remove_tree_by_path(tree.path)
# index all indexable attributes
for index in tree.indexes:
if not getattr(tree, index):
continue
self._add_to(
getattr(self, index + "_db"),
getattr(tree, index),
tree,
)
if parent:
self._add_to(self.parent_db, tree.path, parent)
# make sure, that all sub-trees starts with path of parent tree
for sub_tree in tree.sub_trees:
assert sub_tree.path.startswith(tree.path)
for sub_tree in tree.sub_trees:
self.add_tree(sub_tree, parent=tree) | [
"def",
"add_tree",
"(",
"self",
",",
"tree",
",",
"parent",
"=",
"None",
")",
":",
"if",
"tree",
".",
"path",
"in",
"self",
".",
"path_db",
":",
"self",
".",
"remove_tree_by_path",
"(",
"tree",
".",
"path",
")",
"# index all indexable attributes",
"for",
"index",
"in",
"tree",
".",
"indexes",
":",
"if",
"not",
"getattr",
"(",
"tree",
",",
"index",
")",
":",
"continue",
"self",
".",
"_add_to",
"(",
"getattr",
"(",
"self",
",",
"index",
"+",
"\"_db\"",
")",
",",
"getattr",
"(",
"tree",
",",
"index",
")",
",",
"tree",
",",
")",
"if",
"parent",
":",
"self",
".",
"_add_to",
"(",
"self",
".",
"parent_db",
",",
"tree",
".",
"path",
",",
"parent",
")",
"# make sure, that all sub-trees starts with path of parent tree",
"for",
"sub_tree",
"in",
"tree",
".",
"sub_trees",
":",
"assert",
"sub_tree",
".",
"path",
".",
"startswith",
"(",
"tree",
".",
"path",
")",
"for",
"sub_tree",
"in",
"tree",
".",
"sub_trees",
":",
"self",
".",
"add_tree",
"(",
"sub_tree",
",",
"parent",
"=",
"tree",
")"
] | Add `tree` into database.
Args:
tree (obj): :class:`.Tree` instance.
parent (ref, default None): Reference to parent tree. This is used
for all sub-trees in recursive call. | [
"Add",
"tree",
"into",
"database",
"."
] | fb6bd326249847de04b17b64e856c878665cea92 | https://github.com/edeposit/edeposit.amqp.storage/blob/fb6bd326249847de04b17b64e856c878665cea92/src/edeposit/amqp/storage/tree_handler.py#L95-L126 |
249,431 | edeposit/edeposit.amqp.storage | src/edeposit/amqp/storage/tree_handler.py | TreeHandler.remove_tree_by_path | def remove_tree_by_path(self, path):
"""
Remove the tree from database by given `path`.
Args:
path (str): Path of the tree.
"""
with transaction.manager:
trees = self.path_db.get(path, None)
if not trees:
return
for tree in trees:
return self._remove_tree(tree) | python | def remove_tree_by_path(self, path):
"""
Remove the tree from database by given `path`.
Args:
path (str): Path of the tree.
"""
with transaction.manager:
trees = self.path_db.get(path, None)
if not trees:
return
for tree in trees:
return self._remove_tree(tree) | [
"def",
"remove_tree_by_path",
"(",
"self",
",",
"path",
")",
":",
"with",
"transaction",
".",
"manager",
":",
"trees",
"=",
"self",
".",
"path_db",
".",
"get",
"(",
"path",
",",
"None",
")",
"if",
"not",
"trees",
":",
"return",
"for",
"tree",
"in",
"trees",
":",
"return",
"self",
".",
"_remove_tree",
"(",
"tree",
")"
] | Remove the tree from database by given `path`.
Args:
path (str): Path of the tree. | [
"Remove",
"the",
"tree",
"from",
"database",
"by",
"given",
"path",
"."
] | fb6bd326249847de04b17b64e856c878665cea92 | https://github.com/edeposit/edeposit.amqp.storage/blob/fb6bd326249847de04b17b64e856c878665cea92/src/edeposit/amqp/storage/tree_handler.py#L128-L142 |
249,432 | edeposit/edeposit.amqp.storage | src/edeposit/amqp/storage/tree_handler.py | TreeHandler._remove_from | def _remove_from(self, db, index, item):
"""
Remove `item` from `db` at `index`.
Note:
This function is inverse to :meth:`._add_to`.
Args:
db (dict-obj): Dict-like object used to connect to database.
index (str): Index used to look in `db`.
item (obj): Persistent object, which may be stored in DB.
"""
with transaction.manager:
row = db.get(index, None)
if row is None:
return
with transaction.manager:
if item in row:
row.remove(item)
with transaction.manager:
if not row:
del db[index] | python | def _remove_from(self, db, index, item):
"""
Remove `item` from `db` at `index`.
Note:
This function is inverse to :meth:`._add_to`.
Args:
db (dict-obj): Dict-like object used to connect to database.
index (str): Index used to look in `db`.
item (obj): Persistent object, which may be stored in DB.
"""
with transaction.manager:
row = db.get(index, None)
if row is None:
return
with transaction.manager:
if item in row:
row.remove(item)
with transaction.manager:
if not row:
del db[index] | [
"def",
"_remove_from",
"(",
"self",
",",
"db",
",",
"index",
",",
"item",
")",
":",
"with",
"transaction",
".",
"manager",
":",
"row",
"=",
"db",
".",
"get",
"(",
"index",
",",
"None",
")",
"if",
"row",
"is",
"None",
":",
"return",
"with",
"transaction",
".",
"manager",
":",
"if",
"item",
"in",
"row",
":",
"row",
".",
"remove",
"(",
"item",
")",
"with",
"transaction",
".",
"manager",
":",
"if",
"not",
"row",
":",
"del",
"db",
"[",
"index",
"]"
] | Remove `item` from `db` at `index`.
Note:
This function is inverse to :meth:`._add_to`.
Args:
db (dict-obj): Dict-like object used to connect to database.
index (str): Index used to look in `db`.
item (obj): Persistent object, which may be stored in DB. | [
"Remove",
"item",
"from",
"db",
"at",
"index",
"."
] | fb6bd326249847de04b17b64e856c878665cea92 | https://github.com/edeposit/edeposit.amqp.storage/blob/fb6bd326249847de04b17b64e856c878665cea92/src/edeposit/amqp/storage/tree_handler.py#L153-L177 |
249,433 | edeposit/edeposit.amqp.storage | src/edeposit/amqp/storage/tree_handler.py | TreeHandler._remove_tree | def _remove_tree(self, tree, parent=None):
"""
Really remove the tree identified by `tree` instance from all indexes
from database.
Args:
tree (obj): :class:`.Tree` instance.
parent (obj, default None): Reference to parent.
"""
# remove sub-trees
for sub_tree in tree.sub_trees:
self._remove_tree(sub_tree, parent=tree)
# remove itself
for index in tree.indexes:
if not getattr(tree, index):
continue
self._remove_from(
getattr(self, index + "_db"),
getattr(tree, index),
tree,
)
if parent:
self._remove_from(self.parent_db, tree.path, parent)
self.zeo.pack() | python | def _remove_tree(self, tree, parent=None):
"""
Really remove the tree identified by `tree` instance from all indexes
from database.
Args:
tree (obj): :class:`.Tree` instance.
parent (obj, default None): Reference to parent.
"""
# remove sub-trees
for sub_tree in tree.sub_trees:
self._remove_tree(sub_tree, parent=tree)
# remove itself
for index in tree.indexes:
if not getattr(tree, index):
continue
self._remove_from(
getattr(self, index + "_db"),
getattr(tree, index),
tree,
)
if parent:
self._remove_from(self.parent_db, tree.path, parent)
self.zeo.pack() | [
"def",
"_remove_tree",
"(",
"self",
",",
"tree",
",",
"parent",
"=",
"None",
")",
":",
"# remove sub-trees",
"for",
"sub_tree",
"in",
"tree",
".",
"sub_trees",
":",
"self",
".",
"_remove_tree",
"(",
"sub_tree",
",",
"parent",
"=",
"tree",
")",
"# remove itself",
"for",
"index",
"in",
"tree",
".",
"indexes",
":",
"if",
"not",
"getattr",
"(",
"tree",
",",
"index",
")",
":",
"continue",
"self",
".",
"_remove_from",
"(",
"getattr",
"(",
"self",
",",
"index",
"+",
"\"_db\"",
")",
",",
"getattr",
"(",
"tree",
",",
"index",
")",
",",
"tree",
",",
")",
"if",
"parent",
":",
"self",
".",
"_remove_from",
"(",
"self",
".",
"parent_db",
",",
"tree",
".",
"path",
",",
"parent",
")",
"self",
".",
"zeo",
".",
"pack",
"(",
")"
] | Really remove the tree identified by `tree` instance from all indexes
from database.
Args:
tree (obj): :class:`.Tree` instance.
parent (obj, default None): Reference to parent. | [
"Really",
"remove",
"the",
"tree",
"identified",
"by",
"tree",
"instance",
"from",
"all",
"indexes",
"from",
"database",
"."
] | fb6bd326249847de04b17b64e856c878665cea92 | https://github.com/edeposit/edeposit.amqp.storage/blob/fb6bd326249847de04b17b64e856c878665cea92/src/edeposit/amqp/storage/tree_handler.py#L180-L207 |
249,434 | edeposit/edeposit.amqp.storage | src/edeposit/amqp/storage/tree_handler.py | TreeHandler.trees_by_issn | def trees_by_issn(self, issn):
"""
Search trees by `issn`.
Args:
issn (str): :attr:`.Tree.issn` property of :class:`.Tree`.
Returns:
set: Set of matching :class:`Tree` instances.
"""
return set(
self.issn_db.get(issn, OOSet()).keys()
) | python | def trees_by_issn(self, issn):
"""
Search trees by `issn`.
Args:
issn (str): :attr:`.Tree.issn` property of :class:`.Tree`.
Returns:
set: Set of matching :class:`Tree` instances.
"""
return set(
self.issn_db.get(issn, OOSet()).keys()
) | [
"def",
"trees_by_issn",
"(",
"self",
",",
"issn",
")",
":",
"return",
"set",
"(",
"self",
".",
"issn_db",
".",
"get",
"(",
"issn",
",",
"OOSet",
"(",
")",
")",
".",
"keys",
"(",
")",
")"
] | Search trees by `issn`.
Args:
issn (str): :attr:`.Tree.issn` property of :class:`.Tree`.
Returns:
set: Set of matching :class:`Tree` instances. | [
"Search",
"trees",
"by",
"issn",
"."
] | fb6bd326249847de04b17b64e856c878665cea92 | https://github.com/edeposit/edeposit.amqp.storage/blob/fb6bd326249847de04b17b64e856c878665cea92/src/edeposit/amqp/storage/tree_handler.py#L210-L222 |
249,435 | edeposit/edeposit.amqp.storage | src/edeposit/amqp/storage/tree_handler.py | TreeHandler.trees_by_path | def trees_by_path(self, path):
"""
Search trees by `path`.
Args:
path (str): :attr:`.Tree.path` property of :class:`.Tree`.
Returns:
set: Set of matching :class:`Tree` instances.
"""
return set(
self.path_db.get(path, OOSet()).keys()
) | python | def trees_by_path(self, path):
"""
Search trees by `path`.
Args:
path (str): :attr:`.Tree.path` property of :class:`.Tree`.
Returns:
set: Set of matching :class:`Tree` instances.
"""
return set(
self.path_db.get(path, OOSet()).keys()
) | [
"def",
"trees_by_path",
"(",
"self",
",",
"path",
")",
":",
"return",
"set",
"(",
"self",
".",
"path_db",
".",
"get",
"(",
"path",
",",
"OOSet",
"(",
")",
")",
".",
"keys",
"(",
")",
")"
] | Search trees by `path`.
Args:
path (str): :attr:`.Tree.path` property of :class:`.Tree`.
Returns:
set: Set of matching :class:`Tree` instances. | [
"Search",
"trees",
"by",
"path",
"."
] | fb6bd326249847de04b17b64e856c878665cea92 | https://github.com/edeposit/edeposit.amqp.storage/blob/fb6bd326249847de04b17b64e856c878665cea92/src/edeposit/amqp/storage/tree_handler.py#L225-L237 |
249,436 | edeposit/edeposit.amqp.storage | src/edeposit/amqp/storage/tree_handler.py | TreeHandler.get_parent | def get_parent(self, tree, alt=None):
"""
Get parent for given `tree` or `alt` if not found.
Args:
tree (obj): :class:`.Tree` instance, which is already stored in DB.
alt (obj, default None): Alternative value returned when `tree` is
not found.
Returns:
obj: :class:`.Tree` parent to given `tree`.
"""
parent = self.parent_db.get(tree.path)
if not parent:
return alt
return list(parent)[0] | python | def get_parent(self, tree, alt=None):
"""
Get parent for given `tree` or `alt` if not found.
Args:
tree (obj): :class:`.Tree` instance, which is already stored in DB.
alt (obj, default None): Alternative value returned when `tree` is
not found.
Returns:
obj: :class:`.Tree` parent to given `tree`.
"""
parent = self.parent_db.get(tree.path)
if not parent:
return alt
return list(parent)[0] | [
"def",
"get_parent",
"(",
"self",
",",
"tree",
",",
"alt",
"=",
"None",
")",
":",
"parent",
"=",
"self",
".",
"parent_db",
".",
"get",
"(",
"tree",
".",
"path",
")",
"if",
"not",
"parent",
":",
"return",
"alt",
"return",
"list",
"(",
"parent",
")",
"[",
"0",
"]"
] | Get parent for given `tree` or `alt` if not found.
Args:
tree (obj): :class:`.Tree` instance, which is already stored in DB.
alt (obj, default None): Alternative value returned when `tree` is
not found.
Returns:
obj: :class:`.Tree` parent to given `tree`. | [
"Get",
"parent",
"for",
"given",
"tree",
"or",
"alt",
"if",
"not",
"found",
"."
] | fb6bd326249847de04b17b64e856c878665cea92 | https://github.com/edeposit/edeposit.amqp.storage/blob/fb6bd326249847de04b17b64e856c878665cea92/src/edeposit/amqp/storage/tree_handler.py#L261-L278 |
249,437 | ulf1/oxyba | oxyba/yearfrac_365q.py | yearfrac_365q | def yearfrac_365q(d1, d2):
"""date difference "d1-d2" as year fractional"""
# import modules
from datetime import date
from oxyba import date_to_datetime
# define yearfrac formula
# toyf = lambda a,b: (a - b).days / 365.2425
def toyf(a, b):
a = date_to_datetime(a) if isinstance(a, date) else a
b = date_to_datetime(b) if isinstance(b, date) else b
return (a - b).days / 365.2425
# deal with scalars and vectors
n1 = len(d1) if hasattr(d1, "__iter__") else 1
n2 = len(d2) if hasattr(d2, "__iter__") else 1
# compute yearfrac
if n1 == 1 and n2 == 1:
return toyf(d1, d2)
elif n1 > 1 and n2 == 1:
return [toyf(elem, d2) for elem in d1]
elif n1 == 1 and n2 > 1:
return [toyf(d1, elem) for elem in d2]
elif n1 > 1 and n1 == n2:
return [toyf(e1, e2) for e1, e2 in zip(d1, d2)]
else:
raise Exception("d1 and d2 have the wrong dimensions.") | python | def yearfrac_365q(d1, d2):
"""date difference "d1-d2" as year fractional"""
# import modules
from datetime import date
from oxyba import date_to_datetime
# define yearfrac formula
# toyf = lambda a,b: (a - b).days / 365.2425
def toyf(a, b):
a = date_to_datetime(a) if isinstance(a, date) else a
b = date_to_datetime(b) if isinstance(b, date) else b
return (a - b).days / 365.2425
# deal with scalars and vectors
n1 = len(d1) if hasattr(d1, "__iter__") else 1
n2 = len(d2) if hasattr(d2, "__iter__") else 1
# compute yearfrac
if n1 == 1 and n2 == 1:
return toyf(d1, d2)
elif n1 > 1 and n2 == 1:
return [toyf(elem, d2) for elem in d1]
elif n1 == 1 and n2 > 1:
return [toyf(d1, elem) for elem in d2]
elif n1 > 1 and n1 == n2:
return [toyf(e1, e2) for e1, e2 in zip(d1, d2)]
else:
raise Exception("d1 and d2 have the wrong dimensions.") | [
"def",
"yearfrac_365q",
"(",
"d1",
",",
"d2",
")",
":",
"# import modules",
"from",
"datetime",
"import",
"date",
"from",
"oxyba",
"import",
"date_to_datetime",
"# define yearfrac formula",
"# toyf = lambda a,b: (a - b).days / 365.2425",
"def",
"toyf",
"(",
"a",
",",
"b",
")",
":",
"a",
"=",
"date_to_datetime",
"(",
"a",
")",
"if",
"isinstance",
"(",
"a",
",",
"date",
")",
"else",
"a",
"b",
"=",
"date_to_datetime",
"(",
"b",
")",
"if",
"isinstance",
"(",
"b",
",",
"date",
")",
"else",
"b",
"return",
"(",
"a",
"-",
"b",
")",
".",
"days",
"/",
"365.2425",
"# deal with scalars and vectors",
"n1",
"=",
"len",
"(",
"d1",
")",
"if",
"hasattr",
"(",
"d1",
",",
"\"__iter__\"",
")",
"else",
"1",
"n2",
"=",
"len",
"(",
"d2",
")",
"if",
"hasattr",
"(",
"d2",
",",
"\"__iter__\"",
")",
"else",
"1",
"# compute yearfrac",
"if",
"n1",
"==",
"1",
"and",
"n2",
"==",
"1",
":",
"return",
"toyf",
"(",
"d1",
",",
"d2",
")",
"elif",
"n1",
">",
"1",
"and",
"n2",
"==",
"1",
":",
"return",
"[",
"toyf",
"(",
"elem",
",",
"d2",
")",
"for",
"elem",
"in",
"d1",
"]",
"elif",
"n1",
"==",
"1",
"and",
"n2",
">",
"1",
":",
"return",
"[",
"toyf",
"(",
"d1",
",",
"elem",
")",
"for",
"elem",
"in",
"d2",
"]",
"elif",
"n1",
">",
"1",
"and",
"n1",
"==",
"n2",
":",
"return",
"[",
"toyf",
"(",
"e1",
",",
"e2",
")",
"for",
"e1",
",",
"e2",
"in",
"zip",
"(",
"d1",
",",
"d2",
")",
"]",
"else",
":",
"raise",
"Exception",
"(",
"\"d1 and d2 have the wrong dimensions.\"",
")"
] | date difference "d1-d2" as year fractional | [
"date",
"difference",
"d1",
"-",
"d2",
"as",
"year",
"fractional"
] | b3043116050de275124365cb11e7df91fb40169d | https://github.com/ulf1/oxyba/blob/b3043116050de275124365cb11e7df91fb40169d/oxyba/yearfrac_365q.py#L2-L30 |
249,438 | opieters/DynamicNumber | languages/python/dn.py | dn.add | def add(self, name, value, unit=None):
"""Add symbolic link to Dynamic Number list.
name -- name of the symbolic link
value -- value of the link (if not a string, conversion is done with str())
unit -- if value is a numerical value, a unit can be added to invoke the \unit{}{} LaTeX command
"""
# check if unit provided
if unit is not None:
add_unit = True
unit = str(unit)
else:
add_unit = False
# convert value to string
value = str(value)
# write to file
f = open(self.file_dir, 'a')
if add_unit:
f.write("\\pgfkeys{dynamicnumber/%s/%s = \unit{%s}{%s}}\n" % (self.name, name, value, unit))
else:
f.write("\\pgfkeys{dynamicnumber/%s/%s = %s}\n" % (self.name, name, value))
f.close() | python | def add(self, name, value, unit=None):
"""Add symbolic link to Dynamic Number list.
name -- name of the symbolic link
value -- value of the link (if not a string, conversion is done with str())
unit -- if value is a numerical value, a unit can be added to invoke the \unit{}{} LaTeX command
"""
# check if unit provided
if unit is not None:
add_unit = True
unit = str(unit)
else:
add_unit = False
# convert value to string
value = str(value)
# write to file
f = open(self.file_dir, 'a')
if add_unit:
f.write("\\pgfkeys{dynamicnumber/%s/%s = \unit{%s}{%s}}\n" % (self.name, name, value, unit))
else:
f.write("\\pgfkeys{dynamicnumber/%s/%s = %s}\n" % (self.name, name, value))
f.close() | [
"def",
"add",
"(",
"self",
",",
"name",
",",
"value",
",",
"unit",
"=",
"None",
")",
":",
"# check if unit provided",
"if",
"unit",
"is",
"not",
"None",
":",
"add_unit",
"=",
"True",
"unit",
"=",
"str",
"(",
"unit",
")",
"else",
":",
"add_unit",
"=",
"False",
"# convert value to string",
"value",
"=",
"str",
"(",
"value",
")",
"# write to file",
"f",
"=",
"open",
"(",
"self",
".",
"file_dir",
",",
"'a'",
")",
"if",
"add_unit",
":",
"f",
".",
"write",
"(",
"\"\\\\pgfkeys{dynamicnumber/%s/%s = \\unit{%s}{%s}}\\n\"",
"%",
"(",
"self",
".",
"name",
",",
"name",
",",
"value",
",",
"unit",
")",
")",
"else",
":",
"f",
".",
"write",
"(",
"\"\\\\pgfkeys{dynamicnumber/%s/%s = %s}\\n\"",
"%",
"(",
"self",
".",
"name",
",",
"name",
",",
"value",
")",
")",
"f",
".",
"close",
"(",
")"
] | Add symbolic link to Dynamic Number list.
name -- name of the symbolic link
value -- value of the link (if not a string, conversion is done with str())
unit -- if value is a numerical value, a unit can be added to invoke the \unit{}{} LaTeX command | [
"Add",
"symbolic",
"link",
"to",
"Dynamic",
"Number",
"list",
"."
] | 433679e9f772a4d0e633e73bd1243ce912bb8dfc | https://github.com/opieters/DynamicNumber/blob/433679e9f772a4d0e633e73bd1243ce912bb8dfc/languages/python/dn.py#L38-L61 |
249,439 | Bystroushaak/zeo_connector | src/zeo_connector/zeo_wrapper_prototype.py | _init_zeo | def _init_zeo():
"""
Start asyncore thread.
"""
if not _ASYNCORE_RUNNING:
def _run_asyncore_loop():
asyncore.loop()
thread.start_new_thread(_run_asyncore_loop, ())
global _ASYNCORE_RUNNING
_ASYNCORE_RUNNING = True | python | def _init_zeo():
"""
Start asyncore thread.
"""
if not _ASYNCORE_RUNNING:
def _run_asyncore_loop():
asyncore.loop()
thread.start_new_thread(_run_asyncore_loop, ())
global _ASYNCORE_RUNNING
_ASYNCORE_RUNNING = True | [
"def",
"_init_zeo",
"(",
")",
":",
"if",
"not",
"_ASYNCORE_RUNNING",
":",
"def",
"_run_asyncore_loop",
"(",
")",
":",
"asyncore",
".",
"loop",
"(",
")",
"thread",
".",
"start_new_thread",
"(",
"_run_asyncore_loop",
",",
"(",
")",
")",
"global",
"_ASYNCORE_RUNNING",
"_ASYNCORE_RUNNING",
"=",
"True"
] | Start asyncore thread. | [
"Start",
"asyncore",
"thread",
"."
] | 93f86447204efc8e33d3112907cd221daf6bce3b | https://github.com/Bystroushaak/zeo_connector/blob/93f86447204efc8e33d3112907cd221daf6bce3b/src/zeo_connector/zeo_wrapper_prototype.py#L21-L32 |
249,440 | Bystroushaak/zeo_connector | src/zeo_connector/zeo_wrapper_prototype.py | retry_and_reset | def retry_and_reset(fn):
"""
Decorator used to make sure, that operation on ZEO object will be retried,
if there is ``ConnectionStateError`` exception.
"""
@wraps(fn)
def retry_and_reset_decorator(*args, **kwargs):
obj = kwargs.get("self", None)
if not obj:
obj = args[0]
try:
return fn(*args, **kwargs)
except ConnectionStateError:
obj._on_close_callback()
return fn(*args, **kwargs)
return retry_and_reset_decorator | python | def retry_and_reset(fn):
"""
Decorator used to make sure, that operation on ZEO object will be retried,
if there is ``ConnectionStateError`` exception.
"""
@wraps(fn)
def retry_and_reset_decorator(*args, **kwargs):
obj = kwargs.get("self", None)
if not obj:
obj = args[0]
try:
return fn(*args, **kwargs)
except ConnectionStateError:
obj._on_close_callback()
return fn(*args, **kwargs)
return retry_and_reset_decorator | [
"def",
"retry_and_reset",
"(",
"fn",
")",
":",
"@",
"wraps",
"(",
"fn",
")",
"def",
"retry_and_reset_decorator",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"obj",
"=",
"kwargs",
".",
"get",
"(",
"\"self\"",
",",
"None",
")",
"if",
"not",
"obj",
":",
"obj",
"=",
"args",
"[",
"0",
"]",
"try",
":",
"return",
"fn",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"ConnectionStateError",
":",
"obj",
".",
"_on_close_callback",
"(",
")",
"return",
"fn",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"retry_and_reset_decorator"
] | Decorator used to make sure, that operation on ZEO object will be retried,
if there is ``ConnectionStateError`` exception. | [
"Decorator",
"used",
"to",
"make",
"sure",
"that",
"operation",
"on",
"ZEO",
"object",
"will",
"be",
"retried",
"if",
"there",
"is",
"ConnectionStateError",
"exception",
"."
] | 93f86447204efc8e33d3112907cd221daf6bce3b | https://github.com/Bystroushaak/zeo_connector/blob/93f86447204efc8e33d3112907cd221daf6bce3b/src/zeo_connector/zeo_wrapper_prototype.py#L35-L54 |
249,441 | Bystroushaak/zeo_connector | src/zeo_connector/zeo_wrapper_prototype.py | ZEOWrapperPrototype._init_zeo_root | def _init_zeo_root(self, attempts=3):
"""
Get and initialize the ZEO root object.
Args:
attempts (int, default 3): How many times to try, if the connection
was lost.
"""
try:
db_root = self._connection.root()
except ConnectionStateError:
if attempts <= 0:
raise
self._open_connection()
return self._init_zeo_root(attempts=attempts-1)
# init the root, if it wasn't already declared
if self.project_key and self.project_key not in db_root:
with transaction.manager:
db_root[self.project_key] = self.default_type()
self._root = db_root[self.project_key] if self.project_key else db_root | python | def _init_zeo_root(self, attempts=3):
"""
Get and initialize the ZEO root object.
Args:
attempts (int, default 3): How many times to try, if the connection
was lost.
"""
try:
db_root = self._connection.root()
except ConnectionStateError:
if attempts <= 0:
raise
self._open_connection()
return self._init_zeo_root(attempts=attempts-1)
# init the root, if it wasn't already declared
if self.project_key and self.project_key not in db_root:
with transaction.manager:
db_root[self.project_key] = self.default_type()
self._root = db_root[self.project_key] if self.project_key else db_root | [
"def",
"_init_zeo_root",
"(",
"self",
",",
"attempts",
"=",
"3",
")",
":",
"try",
":",
"db_root",
"=",
"self",
".",
"_connection",
".",
"root",
"(",
")",
"except",
"ConnectionStateError",
":",
"if",
"attempts",
"<=",
"0",
":",
"raise",
"self",
".",
"_open_connection",
"(",
")",
"return",
"self",
".",
"_init_zeo_root",
"(",
"attempts",
"=",
"attempts",
"-",
"1",
")",
"# init the root, if it wasn't already declared",
"if",
"self",
".",
"project_key",
"and",
"self",
".",
"project_key",
"not",
"in",
"db_root",
":",
"with",
"transaction",
".",
"manager",
":",
"db_root",
"[",
"self",
".",
"project_key",
"]",
"=",
"self",
".",
"default_type",
"(",
")",
"self",
".",
"_root",
"=",
"db_root",
"[",
"self",
".",
"project_key",
"]",
"if",
"self",
".",
"project_key",
"else",
"db_root"
] | Get and initialize the ZEO root object.
Args:
attempts (int, default 3): How many times to try, if the connection
was lost. | [
"Get",
"and",
"initialize",
"the",
"ZEO",
"root",
"object",
"."
] | 93f86447204efc8e33d3112907cd221daf6bce3b | https://github.com/Bystroushaak/zeo_connector/blob/93f86447204efc8e33d3112907cd221daf6bce3b/src/zeo_connector/zeo_wrapper_prototype.py#L120-L142 |
249,442 | ariebovenberg/valuable | valuable/load.py | create_dataclass_loader | def create_dataclass_loader(cls, registry, field_getters):
"""create a loader for a dataclass type"""
fields = cls.__dataclass_fields__
item_loaders = map(registry, map(attrgetter('type'), fields.values()))
getters = map(field_getters.__getitem__, fields)
loaders = list(starmap(compose, zip(item_loaders, getters)))
def dloader(obj):
return cls(*(g(obj) for g in loaders))
return dloader | python | def create_dataclass_loader(cls, registry, field_getters):
"""create a loader for a dataclass type"""
fields = cls.__dataclass_fields__
item_loaders = map(registry, map(attrgetter('type'), fields.values()))
getters = map(field_getters.__getitem__, fields)
loaders = list(starmap(compose, zip(item_loaders, getters)))
def dloader(obj):
return cls(*(g(obj) for g in loaders))
return dloader | [
"def",
"create_dataclass_loader",
"(",
"cls",
",",
"registry",
",",
"field_getters",
")",
":",
"fields",
"=",
"cls",
".",
"__dataclass_fields__",
"item_loaders",
"=",
"map",
"(",
"registry",
",",
"map",
"(",
"attrgetter",
"(",
"'type'",
")",
",",
"fields",
".",
"values",
"(",
")",
")",
")",
"getters",
"=",
"map",
"(",
"field_getters",
".",
"__getitem__",
",",
"fields",
")",
"loaders",
"=",
"list",
"(",
"starmap",
"(",
"compose",
",",
"zip",
"(",
"item_loaders",
",",
"getters",
")",
")",
")",
"def",
"dloader",
"(",
"obj",
")",
":",
"return",
"cls",
"(",
"*",
"(",
"g",
"(",
"obj",
")",
"for",
"g",
"in",
"loaders",
")",
")",
"return",
"dloader"
] | create a loader for a dataclass type | [
"create",
"a",
"loader",
"for",
"a",
"dataclass",
"type"
] | 72ac98b5a044233f13d14a9b9f273ce3a237d9ae | https://github.com/ariebovenberg/valuable/blob/72ac98b5a044233f13d14a9b9f273ce3a237d9ae/valuable/load.py#L145-L155 |
249,443 | coghost/izen | izen/helper.py | rand_block | def rand_block(minimum, scale, maximum=1):
"""
block current thread at random pareto time ``minimum < block < 15`` and return the sleep time ``seconds``
:param minimum:
:type minimum:
:param scale:
:type scale:
:param slow_mode: a tuple e.g.(2, 5)
:type slow_mode: tuple
:return:
"""
t = min(rand_pareto_float(minimum, scale), maximum)
time.sleep(t)
return t | python | def rand_block(minimum, scale, maximum=1):
"""
block current thread at random pareto time ``minimum < block < 15`` and return the sleep time ``seconds``
:param minimum:
:type minimum:
:param scale:
:type scale:
:param slow_mode: a tuple e.g.(2, 5)
:type slow_mode: tuple
:return:
"""
t = min(rand_pareto_float(minimum, scale), maximum)
time.sleep(t)
return t | [
"def",
"rand_block",
"(",
"minimum",
",",
"scale",
",",
"maximum",
"=",
"1",
")",
":",
"t",
"=",
"min",
"(",
"rand_pareto_float",
"(",
"minimum",
",",
"scale",
")",
",",
"maximum",
")",
"time",
".",
"sleep",
"(",
"t",
")",
"return",
"t"
] | block current thread at random pareto time ``minimum < block < 15`` and return the sleep time ``seconds``
:param minimum:
:type minimum:
:param scale:
:type scale:
:param slow_mode: a tuple e.g.(2, 5)
:type slow_mode: tuple
:return: | [
"block",
"current",
"thread",
"at",
"random",
"pareto",
"time",
"minimum",
"<",
"block",
"<",
"15",
"and",
"return",
"the",
"sleep",
"time",
"seconds"
] | 432db017f99dd2ba809e1ba1792145ab6510263d | https://github.com/coghost/izen/blob/432db017f99dd2ba809e1ba1792145ab6510263d/izen/helper.py#L1267-L1281 |
249,444 | coghost/izen | izen/helper.py | TermTable._print_divide | def _print_divide(self):
"""Prints all those table line dividers."""
for space in self.AttributesLength:
self.StrTable += "+ " + "- " * space
self.StrTable += "+" + "\n" | python | def _print_divide(self):
"""Prints all those table line dividers."""
for space in self.AttributesLength:
self.StrTable += "+ " + "- " * space
self.StrTable += "+" + "\n" | [
"def",
"_print_divide",
"(",
"self",
")",
":",
"for",
"space",
"in",
"self",
".",
"AttributesLength",
":",
"self",
".",
"StrTable",
"+=",
"\"+ \"",
"+",
"\"- \"",
"*",
"space",
"self",
".",
"StrTable",
"+=",
"\"+\"",
"+",
"\"\\n\""
] | Prints all those table line dividers. | [
"Prints",
"all",
"those",
"table",
"line",
"dividers",
"."
] | 432db017f99dd2ba809e1ba1792145ab6510263d | https://github.com/coghost/izen/blob/432db017f99dd2ba809e1ba1792145ab6510263d/izen/helper.py#L1150-L1154 |
249,445 | coghost/izen | izen/helper.py | TermTable._create_table | def _create_table(self):
"""
Creates a pretty-printed string representation of the table as
``self.StrTable``.
"""
self.StrTable = ""
self.AttributesLength = []
self.Lines_num = 0
# Prepare some values..
for col in self.Table:
# Updates the table line count if necessary
values = list(col.values())[0]
self.Lines_num = max(self.Lines_num, len(values))
# find the length of longest value in current column
key_length = max([self._disp_width(v) for v in values] or [0])
# and also the table header
key_length = max(key_length, self._disp_width(list(col.keys())[0]))
self.AttributesLength.append(key_length)
# Do the real thing.
self._print_head()
self._print_value() | python | def _create_table(self):
"""
Creates a pretty-printed string representation of the table as
``self.StrTable``.
"""
self.StrTable = ""
self.AttributesLength = []
self.Lines_num = 0
# Prepare some values..
for col in self.Table:
# Updates the table line count if necessary
values = list(col.values())[0]
self.Lines_num = max(self.Lines_num, len(values))
# find the length of longest value in current column
key_length = max([self._disp_width(v) for v in values] or [0])
# and also the table header
key_length = max(key_length, self._disp_width(list(col.keys())[0]))
self.AttributesLength.append(key_length)
# Do the real thing.
self._print_head()
self._print_value() | [
"def",
"_create_table",
"(",
"self",
")",
":",
"self",
".",
"StrTable",
"=",
"\"\"",
"self",
".",
"AttributesLength",
"=",
"[",
"]",
"self",
".",
"Lines_num",
"=",
"0",
"# Prepare some values..",
"for",
"col",
"in",
"self",
".",
"Table",
":",
"# Updates the table line count if necessary",
"values",
"=",
"list",
"(",
"col",
".",
"values",
"(",
")",
")",
"[",
"0",
"]",
"self",
".",
"Lines_num",
"=",
"max",
"(",
"self",
".",
"Lines_num",
",",
"len",
"(",
"values",
")",
")",
"# find the length of longest value in current column",
"key_length",
"=",
"max",
"(",
"[",
"self",
".",
"_disp_width",
"(",
"v",
")",
"for",
"v",
"in",
"values",
"]",
"or",
"[",
"0",
"]",
")",
"# and also the table header",
"key_length",
"=",
"max",
"(",
"key_length",
",",
"self",
".",
"_disp_width",
"(",
"list",
"(",
"col",
".",
"keys",
"(",
")",
")",
"[",
"0",
"]",
")",
")",
"self",
".",
"AttributesLength",
".",
"append",
"(",
"key_length",
")",
"# Do the real thing.",
"self",
".",
"_print_head",
"(",
")",
"self",
".",
"_print_value",
"(",
")"
] | Creates a pretty-printed string representation of the table as
``self.StrTable``. | [
"Creates",
"a",
"pretty",
"-",
"printed",
"string",
"representation",
"of",
"the",
"table",
"as",
"self",
".",
"StrTable",
"."
] | 432db017f99dd2ba809e1ba1792145ab6510263d | https://github.com/coghost/izen/blob/432db017f99dd2ba809e1ba1792145ab6510263d/izen/helper.py#L1182-L1202 |
249,446 | coghost/izen | izen/helper.py | TermTable._print_head | def _print_head(self):
"""Generates the table header."""
self._print_divide()
self.StrTable += "| "
for colwidth, attr in zip(self.AttributesLength, self.Attributes):
self.StrTable += self._pad_string(attr, colwidth * 2)
self.StrTable += "| "
self.StrTable += '\n'
self._print_divide() | python | def _print_head(self):
"""Generates the table header."""
self._print_divide()
self.StrTable += "| "
for colwidth, attr in zip(self.AttributesLength, self.Attributes):
self.StrTable += self._pad_string(attr, colwidth * 2)
self.StrTable += "| "
self.StrTable += '\n'
self._print_divide() | [
"def",
"_print_head",
"(",
"self",
")",
":",
"self",
".",
"_print_divide",
"(",
")",
"self",
".",
"StrTable",
"+=",
"\"| \"",
"for",
"colwidth",
",",
"attr",
"in",
"zip",
"(",
"self",
".",
"AttributesLength",
",",
"self",
".",
"Attributes",
")",
":",
"self",
".",
"StrTable",
"+=",
"self",
".",
"_pad_string",
"(",
"attr",
",",
"colwidth",
"*",
"2",
")",
"self",
".",
"StrTable",
"+=",
"\"| \"",
"self",
".",
"StrTable",
"+=",
"'\\n'",
"self",
".",
"_print_divide",
"(",
")"
] | Generates the table header. | [
"Generates",
"the",
"table",
"header",
"."
] | 432db017f99dd2ba809e1ba1792145ab6510263d | https://github.com/coghost/izen/blob/432db017f99dd2ba809e1ba1792145ab6510263d/izen/helper.py#L1204-L1212 |
249,447 | coghost/izen | izen/helper.py | TermTable._print_value | def _print_value(self):
"""Generates the table values."""
for line in range(self.Lines_num):
for col, length in zip(self.Table, self.AttributesLength):
vals = list(col.values())[0]
val = vals[line] if len(vals) != 0 and line < len(vals) else ''
self.StrTable += "| "
self.StrTable += self._pad_string(val, length * 2)
self.StrTable += "|" + '\n'
self._print_divide() | python | def _print_value(self):
"""Generates the table values."""
for line in range(self.Lines_num):
for col, length in zip(self.Table, self.AttributesLength):
vals = list(col.values())[0]
val = vals[line] if len(vals) != 0 and line < len(vals) else ''
self.StrTable += "| "
self.StrTable += self._pad_string(val, length * 2)
self.StrTable += "|" + '\n'
self._print_divide() | [
"def",
"_print_value",
"(",
"self",
")",
":",
"for",
"line",
"in",
"range",
"(",
"self",
".",
"Lines_num",
")",
":",
"for",
"col",
",",
"length",
"in",
"zip",
"(",
"self",
".",
"Table",
",",
"self",
".",
"AttributesLength",
")",
":",
"vals",
"=",
"list",
"(",
"col",
".",
"values",
"(",
")",
")",
"[",
"0",
"]",
"val",
"=",
"vals",
"[",
"line",
"]",
"if",
"len",
"(",
"vals",
")",
"!=",
"0",
"and",
"line",
"<",
"len",
"(",
"vals",
")",
"else",
"''",
"self",
".",
"StrTable",
"+=",
"\"| \"",
"self",
".",
"StrTable",
"+=",
"self",
".",
"_pad_string",
"(",
"val",
",",
"length",
"*",
"2",
")",
"self",
".",
"StrTable",
"+=",
"\"|\"",
"+",
"'\\n'",
"self",
".",
"_print_divide",
"(",
")"
] | Generates the table values. | [
"Generates",
"the",
"table",
"values",
"."
] | 432db017f99dd2ba809e1ba1792145ab6510263d | https://github.com/coghost/izen/blob/432db017f99dd2ba809e1ba1792145ab6510263d/izen/helper.py#L1214-L1223 |
249,448 | coghost/izen | izen/helper.py | TermTable._pad_string | def _pad_string(self, str, colwidth):
"""Center-pads a string to the given column width using spaces."""
width = self._disp_width(str)
prefix = (colwidth - 1 - width) // 2
suffix = colwidth - prefix - width
return ' ' * prefix + str + ' ' * suffix | python | def _pad_string(self, str, colwidth):
"""Center-pads a string to the given column width using spaces."""
width = self._disp_width(str)
prefix = (colwidth - 1 - width) // 2
suffix = colwidth - prefix - width
return ' ' * prefix + str + ' ' * suffix | [
"def",
"_pad_string",
"(",
"self",
",",
"str",
",",
"colwidth",
")",
":",
"width",
"=",
"self",
".",
"_disp_width",
"(",
"str",
")",
"prefix",
"=",
"(",
"colwidth",
"-",
"1",
"-",
"width",
")",
"//",
"2",
"suffix",
"=",
"colwidth",
"-",
"prefix",
"-",
"width",
"return",
"' '",
"*",
"prefix",
"+",
"str",
"+",
"' '",
"*",
"suffix"
] | Center-pads a string to the given column width using spaces. | [
"Center",
"-",
"pads",
"a",
"string",
"to",
"the",
"given",
"column",
"width",
"using",
"spaces",
"."
] | 432db017f99dd2ba809e1ba1792145ab6510263d | https://github.com/coghost/izen/blob/432db017f99dd2ba809e1ba1792145ab6510263d/izen/helper.py#L1242-L1247 |
249,449 | eallik/spinoff | spinoff/util/http.py | basic_auth_string | def basic_auth_string(username, password):
"""
Encode a username and password for use in an HTTP Basic Authentication
header
"""
b64 = base64.encodestring('%s:%s' % (username, password)).strip()
return 'Basic %s' % b64 | python | def basic_auth_string(username, password):
"""
Encode a username and password for use in an HTTP Basic Authentication
header
"""
b64 = base64.encodestring('%s:%s' % (username, password)).strip()
return 'Basic %s' % b64 | [
"def",
"basic_auth_string",
"(",
"username",
",",
"password",
")",
":",
"b64",
"=",
"base64",
".",
"encodestring",
"(",
"'%s:%s'",
"%",
"(",
"username",
",",
"password",
")",
")",
".",
"strip",
"(",
")",
"return",
"'Basic %s'",
"%",
"b64"
] | Encode a username and password for use in an HTTP Basic Authentication
header | [
"Encode",
"a",
"username",
"and",
"password",
"for",
"use",
"in",
"an",
"HTTP",
"Basic",
"Authentication",
"header"
] | 06b00d6b86c7422c9cb8f9a4b2915906e92b7d52 | https://github.com/eallik/spinoff/blob/06b00d6b86c7422c9cb8f9a4b2915906e92b7d52/spinoff/util/http.py#L80-L86 |
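A minimal usage sketch for the record above (Python 2, matching the source). The URL and credentials are illustrative, not taken from the repository:

# Hypothetical usage of basic_auth_string; 'alice'/'s3cret' are made up.
import urllib2

headers = {'Authorization': basic_auth_string('alice', 's3cret')}
req = urllib2.Request('http://example.com/api', headers=headers)
# urllib2.urlopen(req) would send: Authorization: Basic YWxpY2U6czNjcmV0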
249,450 | laysakura/relshell | relshell/type.py | Type.equivalent_relshell_type | def equivalent_relshell_type(val):
"""Returns `val`'s relshell compatible type.
:param val: value to check relshell equivalent type
:raises: `NotImplementedError` if val's relshell compatible type is not implemented.
"""
builtin_type = type(val)
if builtin_type not in Type._typemap:
raise NotImplementedError("builtin type %s is not convertible to relshell type" %
(builtin_type))
relshell_type_str = Type._typemap[builtin_type]
return Type(relshell_type_str) | python | def equivalent_relshell_type(val):
"""Returns `val`'s relshell compatible type.
:param val: value to check relshell equivalent type
:raises: `NotImplementedError` if val's relshell compatible type is not implemented.
"""
builtin_type = type(val)
if builtin_type not in Type._typemap:
raise NotImplementedError("builtin type %s is not convertible to relshell type" %
(builtin_type))
relshell_type_str = Type._typemap[builtin_type]
return Type(relshell_type_str) | [
"def",
"equivalent_relshell_type",
"(",
"val",
")",
":",
"builtin_type",
"=",
"type",
"(",
"val",
")",
"if",
"builtin_type",
"not",
"in",
"Type",
".",
"_typemap",
":",
"raise",
"NotImplementedError",
"(",
"\"builtin type %s is not convertible to relshell type\"",
"%",
"(",
"builtin_type",
")",
")",
"relshell_type_str",
"=",
"Type",
".",
"_typemap",
"[",
"builtin_type",
"]",
"return",
"Type",
"(",
"relshell_type_str",
")"
] | Returns `val`'s relshell compatible type.
:param val: value to check relshell equivalent type
:raises: `NotImplementedError` if val's relshell compatible type is not implemented. | [
"Returns",
"val",
"s",
"relshell",
"compatible",
"type",
"."
] | 9ca5c03a34c11cb763a4a75595f18bf4383aa8cc | https://github.com/laysakura/relshell/blob/9ca5c03a34c11cb763a4a75595f18bf4383aa8cc/relshell/type.py#L53-L64 |
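A hedged sketch of how the lookup above behaves. The concrete contents of Type._typemap are not shown in this record, so the mapped builtins here are assumptions:

# Illustrative only: assumes int is a key of Type._typemap.
t = Type.equivalent_relshell_type(42)  # looks up type(42), i.e. int
try:
    Type.equivalent_relshell_type(object())  # an unmapped builtin type
except NotImplementedError as e:
    print(e)  # "builtin type ... is not convertible to relshell type"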
249,451 | 20c/twentyc.tools | twentyc/tools/thread.py | RunInThread.start | def start(self, *args, **kwargs):
"""
Set the arguments for the callback function and start the
thread
"""
self.runArgs = args
self.runKwargs = kwargs
Thread.start(self) | python | def start(self, *args, **kwargs):
"""
Set the arguments for the callback function and start the
thread
"""
self.runArgs = args
self.runKwargs = kwargs
Thread.start(self) | [
"def",
"start",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"runArgs",
"=",
"args",
"self",
".",
"runKwargs",
"=",
"kwargs",
"Thread",
".",
"start",
"(",
"self",
")"
] | Set the arguments for the callback function and start the
thread | [
"Set",
"the",
"arguments",
"for",
"the",
"callback",
"function",
"and",
"start",
"the",
"thread"
] | f8f681e64f58d449bfc32646ba8bcc57db90a233 | https://github.com/20c/twentyc.tools/blob/f8f681e64f58d449bfc32646ba8bcc57db90a233/twentyc/tools/thread.py#L46-L55 |
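The record above shows a deferred-argument pattern: the callback's arguments are stashed on the instance, then the stock Thread.start takes over. A sketch of how it would be driven (the constructor signature and the run() forwarding are assumptions, not shown in this record):

# Assumes RunInThread wraps a callback and its run() applies
# self.runArgs / self.runKwargs to that callback.
def work(path, retries=3):
    print("processing %s (retries=%d)" % (path, retries))

t = RunInThread(work)                # assumed constructor
t.start("/tmp/data.csv", retries=5)  # arguments captured, thread started
t.join()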
249,452 | ckcollab/brains-cli | brains.py | init | def init(name, languages, run):
"""Initializes your CONFIG_FILE for the current submission"""
contents = [file_name for file_name in glob.glob("*.*") if file_name != "brains.yaml"]
with open(CONFIG_FILE, "w") as output:
output.write(yaml.safe_dump({
"run": run,
"name": name,
"languages": languages,
# automatically insert all root files into contents
"contents": contents,
}, default_flow_style=False))
print ""
cprint("Automatically including the follow files in brain contents:", "cyan")
for file_name in contents:
print "\t", file_name
print ""
cprint("done! brains.yaml created", 'green') | python | def init(name, languages, run):
"""Initializes your CONFIG_FILE for the current submission"""
contents = [file_name for file_name in glob.glob("*.*") if file_name != "brains.yaml"]
with open(CONFIG_FILE, "w") as output:
output.write(yaml.safe_dump({
"run": run,
"name": name,
"languages": languages,
# automatically insert all root files into contents
"contents": contents,
}, default_flow_style=False))
print ""
cprint("Automatically including the follow files in brain contents:", "cyan")
for file_name in contents:
print "\t", file_name
print ""
cprint("done! brains.yaml created", 'green') | [
"def",
"init",
"(",
"name",
",",
"languages",
",",
"run",
")",
":",
"contents",
"=",
"[",
"file_name",
"for",
"file_name",
"in",
"glob",
".",
"glob",
"(",
"\"*.*\"",
")",
"if",
"file_name",
"!=",
"\"brains.yaml\"",
"]",
"with",
"open",
"(",
"CONFIG_FILE",
",",
"\"w\"",
")",
"as",
"output",
":",
"output",
".",
"write",
"(",
"yaml",
".",
"safe_dump",
"(",
"{",
"\"run\"",
":",
"run",
",",
"\"name\"",
":",
"name",
",",
"\"languages\"",
":",
"languages",
",",
"# automatically insert all root files into contents",
"\"contents\"",
":",
"contents",
",",
"}",
",",
"default_flow_style",
"=",
"False",
")",
")",
"print",
"\"\"",
"cprint",
"(",
"\"Automatically including the follow files in brain contents:\"",
",",
"\"cyan\"",
")",
"for",
"file_name",
"in",
"contents",
":",
"print",
"\"\\t\"",
",",
"file_name",
"print",
"\"\"",
"cprint",
"(",
"\"done! brains.yaml created\"",
",",
"'green'",
")"
] | Initializes your CONFIG_FILE for the current submission | [
"Initializes",
"your",
"CONFIG_FILE",
"for",
"the",
"current",
"submission"
] | 8dc512c32fc83ecc3a80bf7fa2b474d142d99b0e | https://github.com/ckcollab/brains-cli/blob/8dc512c32fc83ecc3a80bf7fa2b474d142d99b0e/brains.py#L59-L77 |
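Given the code above, the generated brains.yaml can be previewed directly. A sketch, assuming the working directory contains only main.py and requirements.txt (yaml.safe_dump sorts keys; the order inside contents follows glob and may vary):

# After init("my-brain", "python", "python main.py"):
print(open("brains.yaml").read())
# contents:
# - main.py
# - requirements.txt
# languages: python
# name: my-brain
# run: python main.py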
249,453 | ckcollab/brains-cli | brains.py | push | def push(description, datasets, wait, verbose):
"""Publish your submission to brains"""
# Loading config
config = _get_config()
file_patterns = config["contents"]
if not isinstance(file_patterns, type([])):
# put it into an array so we can iterate it, if it isn't already an array
file_patterns = [file_patterns]
if datasets:
datasets_string = datasets.split(',')
else:
datasets_string = config.get("datasets", '')
# Getting all file names/globs -- making sure we get CONFIG_FILE
files = {CONFIG_FILE} # use a set so we don't get duplicates
for pattern in file_patterns:
files.update(glob.glob(pattern))
if not files:
print "No files could be found? Check your contents section in `CONFIG_FILE`!"
exit(-1)
if verbose:
print ""
print "gatherered files:"
for path in files:
print "\t", path
print ""
# Zipping
_print("zipping...")
if not os.path.exists("brains_history"):
os.mkdir("brains_history")
zip_path = 'brains_history/%s.zip' % str(datetime.datetime.now())
with ZipFile(zip_path, 'w') as zip_file:
for path in files:
zip_file.write(path)
cprint("done", 'green')
# Sending to server
with open(zip_path, 'rb') as zip_file:
_print("sending to server...")
try:
response = requests.post(
URL_SUBMIT,
files={
"zip_file": zip_file,
},
data={
"name": config["name"],
"description": description or '',
"languages": config["languages"],
"datasets": datasets_string,
"wait": wait,
},
stream=wait # if we're waiting for response then stream
)
if response.status_code == 200:
cprint("done", 'green')
if wait:
_print("\nOutput: ")
cprint(" " * 72, 'green', attrs=('underline',))
chunk_buffer = ""
# read in 1 chunk at a time for carriage return detection
for chunk in response.iter_content(chunk_size=1):
chunk_buffer += chunk
if chunk == '\r':
# We hit the end of a message!
try:
data = json.loads(chunk_buffer)
if "stdout" not in data or "stderr" not in data:
print "dis one"
continue
except (ValueError,):
continue
if data["stdout"]:
# Get rid of termination string, if it's there
data["stdout"] = data["stdout"].replace("-%-%-%-%-END BRAIN SEQUENCE-%-%-%-%-", "")
_print(data["stdout"])
if data["stderr"]:
_print(colored(data["stderr"], 'red'))
# Clear buffer after reading message
chunk_buffer = ""
else:
cprint(response.json()["error"], 'red')
except requests.exceptions.ConnectionError:
cprint("failed to connect to server!", 'red')
exit(-2) | python | def push(description, datasets, wait, verbose):
"""Publish your submission to brains"""
# Loading config
config = _get_config()
file_patterns = config["contents"]
if not isinstance(file_patterns, type([])):
# put it into an array so we can iterate it, if it isn't already an array
file_patterns = [file_patterns]
if datasets:
datasets_string = datasets.split(',')
else:
datasets_string = config.get("datasets", '')
# Getting all file names/globs -- making sure we get CONFIG_FILE
files = {CONFIG_FILE} # use a set so we don't get duplicates
for pattern in file_patterns:
files.update(glob.glob(pattern))
if not files:
print "No files could be found? Check your contents section in `CONFIG_FILE`!"
exit(-1)
if verbose:
print ""
print "gatherered files:"
for path in files:
print "\t", path
print ""
# Zipping
_print("zipping...")
if not os.path.exists("brains_history"):
os.mkdir("brains_history")
zip_path = 'brains_history/%s.zip' % str(datetime.datetime.now())
with ZipFile(zip_path, 'w') as zip_file:
for path in files:
zip_file.write(path)
cprint("done", 'green')
# Sending to server
with open(zip_path, 'rb') as zip_file:
_print("sending to server...")
try:
response = requests.post(
URL_SUBMIT,
files={
"zip_file": zip_file,
},
data={
"name": config["name"],
"description": description or '',
"languages": config["languages"],
"datasets": datasets_string,
"wait": wait,
},
stream=wait # if we're waiting for response then stream
)
if response.status_code == 200:
cprint("done", 'green')
if wait:
_print("\nOutput: ")
cprint(" " * 72, 'green', attrs=('underline',))
chunk_buffer = ""
# read in 1 chunk at a time for carriage return detection
for chunk in response.iter_content(chunk_size=1):
chunk_buffer += chunk
if chunk == '\r':
# We hit the end of a message!
try:
data = json.loads(chunk_buffer)
if "stdout" not in data or "stderr" not in data:
print "dis one"
continue
except (ValueError,):
continue
if data["stdout"]:
# Get rid of termination string, if it's there
data["stdout"] = data["stdout"].replace("-%-%-%-%-END BRAIN SEQUENCE-%-%-%-%-", "")
_print(data["stdout"])
if data["stderr"]:
_print(colored(data["stderr"], 'red'))
# Clear buffer after reading message
chunk_buffer = ""
else:
cprint(response.json()["error"], 'red')
except requests.exceptions.ConnectionError:
cprint("failed to connect to server!", 'red')
exit(-2) | [
"def",
"push",
"(",
"description",
",",
"datasets",
",",
"wait",
",",
"verbose",
")",
":",
"# Loading config",
"config",
"=",
"_get_config",
"(",
")",
"file_patterns",
"=",
"config",
"[",
"\"contents\"",
"]",
"if",
"not",
"isinstance",
"(",
"file_patterns",
",",
"type",
"(",
"[",
"]",
")",
")",
":",
"# put it into an array so we can iterate it, if it isn't already an array",
"file_patterns",
"=",
"[",
"file_patterns",
"]",
"if",
"datasets",
":",
"datasets_string",
"=",
"datasets",
".",
"split",
"(",
"','",
")",
"else",
":",
"datasets_string",
"=",
"config",
".",
"get",
"(",
"\"datasets\"",
",",
"''",
")",
"# Getting all file names/globs -- making sure we get CONFIG_FILE",
"files",
"=",
"{",
"CONFIG_FILE",
"}",
"# use a set so we don't get duplicates",
"for",
"pattern",
"in",
"file_patterns",
":",
"files",
".",
"update",
"(",
"glob",
".",
"glob",
"(",
"pattern",
")",
")",
"if",
"not",
"files",
":",
"print",
"\"No files could be found? Check your contents section in `CONFIG_FILE`!\"",
"exit",
"(",
"-",
"1",
")",
"if",
"verbose",
":",
"print",
"\"\"",
"print",
"\"gatherered files:\"",
"for",
"path",
"in",
"files",
":",
"print",
"\"\\t\"",
",",
"path",
"print",
"\"\"",
"# Zipping",
"_print",
"(",
"\"zipping...\"",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"\"brains_history\"",
")",
":",
"os",
".",
"mkdir",
"(",
"\"brains_history\"",
")",
"zip_path",
"=",
"'brains_history/%s.zip'",
"%",
"str",
"(",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
")",
"with",
"ZipFile",
"(",
"zip_path",
",",
"'w'",
")",
"as",
"zip_file",
":",
"for",
"path",
"in",
"files",
":",
"zip_file",
".",
"write",
"(",
"path",
")",
"cprint",
"(",
"\"done\"",
",",
"'green'",
")",
"# Sending to server",
"with",
"open",
"(",
"zip_path",
",",
"'rb'",
")",
"as",
"zip_file",
":",
"_print",
"(",
"\"sending to server...\"",
")",
"try",
":",
"response",
"=",
"requests",
".",
"post",
"(",
"URL_SUBMIT",
",",
"files",
"=",
"{",
"\"zip_file\"",
":",
"zip_file",
",",
"}",
",",
"data",
"=",
"{",
"\"name\"",
":",
"config",
"[",
"\"name\"",
"]",
",",
"\"description\"",
":",
"description",
"or",
"''",
",",
"\"languages\"",
":",
"config",
"[",
"\"languages\"",
"]",
",",
"\"datasets\"",
":",
"datasets_string",
",",
"\"wait\"",
":",
"wait",
",",
"}",
",",
"stream",
"=",
"wait",
"# if we're waiting for response then stream",
")",
"if",
"response",
".",
"status_code",
"==",
"200",
":",
"cprint",
"(",
"\"done\"",
",",
"'green'",
")",
"if",
"wait",
":",
"_print",
"(",
"\"\\nOutput: \"",
")",
"cprint",
"(",
"\" \"",
"*",
"72",
",",
"'green'",
",",
"attrs",
"=",
"(",
"'underline'",
",",
")",
")",
"chunk_buffer",
"=",
"\"\"",
"# read in 1 chunk at a time for carriage return detection",
"for",
"chunk",
"in",
"response",
".",
"iter_content",
"(",
"chunk_size",
"=",
"1",
")",
":",
"chunk_buffer",
"+=",
"chunk",
"if",
"chunk",
"==",
"'\\r'",
":",
"# We hit the end of a message!",
"try",
":",
"data",
"=",
"json",
".",
"loads",
"(",
"chunk_buffer",
")",
"if",
"\"stdout\"",
"not",
"in",
"data",
"or",
"\"stderr\"",
"not",
"in",
"data",
":",
"print",
"\"dis one\"",
"continue",
"except",
"(",
"ValueError",
",",
")",
":",
"continue",
"if",
"data",
"[",
"\"stdout\"",
"]",
":",
"# Get rid of termination string, if it's there",
"data",
"[",
"\"stdout\"",
"]",
"=",
"data",
"[",
"\"stdout\"",
"]",
".",
"replace",
"(",
"\"-%-%-%-%-END BRAIN SEQUENCE-%-%-%-%-\"",
",",
"\"\"",
")",
"_print",
"(",
"data",
"[",
"\"stdout\"",
"]",
")",
"if",
"data",
"[",
"\"stderr\"",
"]",
":",
"_print",
"(",
"colored",
"(",
"data",
"[",
"\"stderr\"",
"]",
",",
"'red'",
")",
")",
"# Clear buffer after reading message",
"chunk_buffer",
"=",
"\"\"",
"else",
":",
"cprint",
"(",
"response",
".",
"json",
"(",
")",
"[",
"\"error\"",
"]",
",",
"'red'",
")",
"except",
"requests",
".",
"exceptions",
".",
"ConnectionError",
":",
"cprint",
"(",
"\"failed to connect to server!\"",
",",
"'red'",
")",
"exit",
"(",
"-",
"2",
")"
] | Publish your submission to brains | [
"Publish",
"your",
"submission",
"to",
"brains"
] | 8dc512c32fc83ecc3a80bf7fa2b474d142d99b0e | https://github.com/ckcollab/brains-cli/blob/8dc512c32fc83ecc3a80bf7fa2b474d142d99b0e/brains.py#L85-L180 |
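The streaming branch above relies on a simple framing: each server message is a JSON object terminated by a carriage return. A standalone sketch of that framing (the stream contents are invented):

import json

def iter_messages(chunks):
    # Accumulate one character at a time, as push() does with chunk_size=1.
    buf = ""
    for ch in chunks:
        buf += ch
        if ch == '\r':
            try:
                yield json.loads(buf)  # trailing '\r' is valid JSON whitespace
            except ValueError:
                pass  # non-JSON frame is skipped, as in push()
            buf = ""

stream = '{"stdout": "hello\\n", "stderr": ""}\r'
for msg in iter_messages(stream):
    print(msg["stdout"])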
249,454 | ckcollab/brains-cli | brains.py | run | def run(dataset):
"""Run brain locally"""
config = _get_config()
if dataset:
_print("getting dataset from brains...")
cprint("done", 'green')
# check dataset cache for dataset
# if not exists
# r = requests.get('https://api.github.com/events', stream=True)
# with open(filename, 'wb') as fd:
# for chunk in r.iter_content(chunk_size):
# fd.write(chunk)
cprint('Running "%s"' % config["run"], 'green', attrs=("underline",))
call(config["run"].split()) | python | def run(dataset):
"""Run brain locally"""
config = _get_config()
if dataset:
_print("getting dataset from brains...")
cprint("done", 'green')
# check dataset cache for dataset
# if not exists
# r = requests.get('https://api.github.com/events', stream=True)
# with open(filename, 'wb') as fd:
# for chunk in r.iter_content(chunk_size):
# fd.write(chunk)
cprint('Running "%s"' % config["run"], 'green', attrs=("underline",))
call(config["run"].split()) | [
"def",
"run",
"(",
"dataset",
")",
":",
"config",
"=",
"_get_config",
"(",
")",
"if",
"dataset",
":",
"_print",
"(",
"\"getting dataset from brains...\"",
")",
"cprint",
"(",
"\"done\"",
",",
"'green'",
")",
"# check dataset cache for dataset",
"# if not exists",
"# r = requests.get('https://api.github.com/events', stream=True)",
"# with open(filename, 'wb') as fd:",
"# for chunk in r.iter_content(chunk_size):",
"# fd.write(chunk)",
"cprint",
"(",
"'Running \"%s\"'",
"%",
"config",
"[",
"\"run\"",
"]",
",",
"'green'",
",",
"attrs",
"=",
"(",
"\"underline\"",
",",
")",
")",
"call",
"(",
"config",
"[",
"\"run\"",
"]",
".",
"split",
"(",
")",
")"
] | Run brain locally | [
"Run",
"brain",
"locally"
] | 8dc512c32fc83ecc3a80bf7fa2b474d142d99b0e | https://github.com/ckcollab/brains-cli/blob/8dc512c32fc83ecc3a80bf7fa2b474d142d99b0e/brains.py#L185-L201 |
249,455 | smetj/wishbone-input-disk | wishbone_input_disk/diskin.py | DiskIn.diskMonitor | def diskMonitor(self):
'''Primitive monitor which checks whether new data is added to disk.'''
while self.loop():
try:
newest = max(glob.iglob("%s/*" % (self.kwargs.directory)), key=os.path.getmtime)
except Exception:
pass
else:
if time() - os.path.getctime(newest) >= self.kwargs.idle_time:
self.reading.set()
else:
self.reading.clear()
sleep(1) | python | def diskMonitor(self):
'''Primitive monitor which checks whether new data is added to disk.'''
while self.loop():
try:
newest = max(glob.iglob("%s/*" % (self.kwargs.directory)), key=os.path.getmtime)
except Exception:
pass
else:
if time() - os.path.getctime(newest) >= self.kwargs.idle_time:
self.reading.set()
else:
self.reading.clear()
sleep(1) | [
"def",
"diskMonitor",
"(",
"self",
")",
":",
"while",
"self",
".",
"loop",
"(",
")",
":",
"try",
":",
"newest",
"=",
"max",
"(",
"glob",
".",
"iglob",
"(",
"\"%s/*\"",
"%",
"(",
"self",
".",
"kwargs",
".",
"directory",
")",
")",
",",
"key",
"=",
"os",
".",
"path",
".",
"getmtime",
")",
"except",
"Exception",
":",
"pass",
"else",
":",
"if",
"time",
"(",
")",
"-",
"os",
".",
"path",
".",
"getctime",
"(",
"newest",
")",
">=",
"self",
".",
"kwargs",
".",
"idle_time",
":",
"self",
".",
"reading",
".",
"set",
"(",
")",
"else",
":",
"self",
".",
"reading",
".",
"clear",
"(",
")",
"sleep",
"(",
"1",
")"
] | Primitive monitor which checks whether new data is added to disk. | [
"Primitive",
"monitor",
"which",
"checks",
"whether",
"new",
"data",
"is",
"added",
"to",
"disk",
"."
] | c19b0df932adbbe7c04f0b76a72cee8a42463a82 | https://github.com/smetj/wishbone-input-disk/blob/c19b0df932adbbe7c04f0b76a72cee8a42463a82/wishbone_input_disk/diskin.py#L105-L118 |
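The idle test above can be exercised on its own: pick the most recently modified file, then check how long it has been untouched. Note the record selects by mtime but measures age via ctime; this sketch mirrors that. The directory and threshold are illustrative, and the directory is assumed non-empty:

import glob, os
from time import time

newest = max(glob.iglob("/var/spool/incoming/*"), key=os.path.getmtime)
idle_time = 10  # seconds, mirroring kwargs.idle_time
if time() - os.path.getctime(newest) >= idle_time:
    print("directory is quiet; safe to read")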
249,456 | exekias/droplet | droplet/models.py | ModelQuerySet.update | def update(self, **kwargs):
"""
Update selected objects with the given keyword parameters
and mark them as changed
"""
super(ModelQuerySet, self).update(_changed=True, **kwargs) | python | def update(self, **kwargs):
"""
Update selected objects with the given keyword parameters
and mark them as changed
"""
super(ModelQuerySet, self).update(_changed=True, **kwargs) | [
"def",
"update",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
"ModelQuerySet",
",",
"self",
")",
".",
"update",
"(",
"_changed",
"=",
"True",
",",
"*",
"*",
"kwargs",
")"
] | Update selected objects with the given keyword parameters
and mark them as changed | [
"Update",
"selected",
"objects",
"with",
"the",
"given",
"keyword",
"parameters",
"and",
"mark",
"them",
"as",
"changed"
] | aeac573a2c1c4b774e99d5414a1c79b1bb734941 | https://github.com/exekias/droplet/blob/aeac573a2c1c4b774e99d5414a1c79b1bb734941/droplet/models.py#L31-L36 |
249,457 | exekias/droplet | droplet/network/models.py | Interface.update | def update(cls):
"""
Update rows to include known network interfaces
"""
ifaddrs = getifaddrs()
# Create new interfaces
for ifname in ifaddrs.keys():
if filter(ifname.startswith, cls.NAME_FILTER):
cls.objects.get_or_create(name=ifname)
# Delete no longer existing ones
cls.objects.exclude(name__in=ifaddrs.keys()).delete() | python | def update(cls):
"""
Update rows to include known network interfaces
"""
ifaddrs = getifaddrs()
# Create new interfaces
for ifname in ifaddrs.keys():
if filter(ifname.startswith, cls.NAME_FILTER):
cls.objects.get_or_create(name=ifname)
# Delete no longer existing ones
cls.objects.exclude(name__in=ifaddrs.keys()).delete() | [
"def",
"update",
"(",
"cls",
")",
":",
"ifaddrs",
"=",
"getifaddrs",
"(",
")",
"# Create new interfaces",
"for",
"ifname",
"in",
"ifaddrs",
".",
"keys",
"(",
")",
":",
"if",
"filter",
"(",
"ifname",
".",
"startswith",
",",
"cls",
".",
"NAME_FILTER",
")",
":",
"cls",
".",
"objects",
".",
"get_or_create",
"(",
"name",
"=",
"ifname",
")",
"# Delete no longer existing ones",
"cls",
".",
"objects",
".",
"exclude",
"(",
"name__in",
"=",
"ifaddrs",
".",
"keys",
"(",
")",
")",
".",
"delete",
"(",
")"
] | Update rows to include known network interfaces | [
"Update",
"rows",
"to",
"include",
"known",
"network",
"interfaces"
] | aeac573a2c1c4b774e99d5414a1c79b1bb734941 | https://github.com/exekias/droplet/blob/aeac573a2c1c4b774e99d5414a1c79b1bb734941/droplet/network/models.py#L56-L67 |
249,458 | minhhoit/yacms | yacms/pages/views.py | admin_page_ordering | def admin_page_ordering(request):
"""
Updates the ordering of pages via AJAX from within the admin.
"""
def get_id(s):
s = s.split("_")[-1]
return int(s) if s.isdigit() else None
page = get_object_or_404(Page, id=get_id(request.POST['id']))
old_parent_id = page.parent_id
new_parent_id = get_id(request.POST['parent_id'])
new_parent = Page.objects.get(id=new_parent_id) if new_parent_id else None
try:
page.get_content_model().can_move(request, new_parent)
except PageMoveException as e:
messages.error(request, e)
return HttpResponse('error')
# Perform the page move
if new_parent_id != page.parent_id:
# Parent changed - set the new parent and re-order the
# previous siblings.
page.set_parent(new_parent)
pages = Page.objects.filter(parent_id=old_parent_id)
for i, page in enumerate(pages.order_by('_order')):
Page.objects.filter(id=page.id).update(_order=i)
# Set the new order for the moved page and its current siblings.
for i, page_id in enumerate(request.POST.getlist('siblings[]')):
Page.objects.filter(id=get_id(page_id)).update(_order=i)
return HttpResponse("ok") | python | def admin_page_ordering(request):
"""
Updates the ordering of pages via AJAX from within the admin.
"""
def get_id(s):
s = s.split("_")[-1]
return int(s) if s.isdigit() else None
page = get_object_or_404(Page, id=get_id(request.POST['id']))
old_parent_id = page.parent_id
new_parent_id = get_id(request.POST['parent_id'])
new_parent = Page.objects.get(id=new_parent_id) if new_parent_id else None
try:
page.get_content_model().can_move(request, new_parent)
except PageMoveException as e:
messages.error(request, e)
return HttpResponse('error')
# Perform the page move
if new_parent_id != page.parent_id:
# Parent changed - set the new parent and re-order the
# previous siblings.
page.set_parent(new_parent)
pages = Page.objects.filter(parent_id=old_parent_id)
for i, page in enumerate(pages.order_by('_order')):
Page.objects.filter(id=page.id).update(_order=i)
# Set the new order for the moved page and its current siblings.
for i, page_id in enumerate(request.POST.getlist('siblings[]')):
Page.objects.filter(id=get_id(page_id)).update(_order=i)
return HttpResponse("ok") | [
"def",
"admin_page_ordering",
"(",
"request",
")",
":",
"def",
"get_id",
"(",
"s",
")",
":",
"s",
"=",
"s",
".",
"split",
"(",
"\"_\"",
")",
"[",
"-",
"1",
"]",
"return",
"int",
"(",
"s",
")",
"if",
"s",
".",
"isdigit",
"(",
")",
"else",
"None",
"page",
"=",
"get_object_or_404",
"(",
"Page",
",",
"id",
"=",
"get_id",
"(",
"request",
".",
"POST",
"[",
"'id'",
"]",
")",
")",
"old_parent_id",
"=",
"page",
".",
"parent_id",
"new_parent_id",
"=",
"get_id",
"(",
"request",
".",
"POST",
"[",
"'parent_id'",
"]",
")",
"new_parent",
"=",
"Page",
".",
"objects",
".",
"get",
"(",
"id",
"=",
"new_parent_id",
")",
"if",
"new_parent_id",
"else",
"None",
"try",
":",
"page",
".",
"get_content_model",
"(",
")",
".",
"can_move",
"(",
"request",
",",
"new_parent",
")",
"except",
"PageMoveException",
"as",
"e",
":",
"messages",
".",
"error",
"(",
"request",
",",
"e",
")",
"return",
"HttpResponse",
"(",
"'error'",
")",
"# Perform the page move",
"if",
"new_parent_id",
"!=",
"page",
".",
"parent_id",
":",
"# Parent changed - set the new parent and re-order the",
"# previous siblings.",
"page",
".",
"set_parent",
"(",
"new_parent",
")",
"pages",
"=",
"Page",
".",
"objects",
".",
"filter",
"(",
"parent_id",
"=",
"old_parent_id",
")",
"for",
"i",
",",
"page",
"in",
"enumerate",
"(",
"pages",
".",
"order_by",
"(",
"'_order'",
")",
")",
":",
"Page",
".",
"objects",
".",
"filter",
"(",
"id",
"=",
"page",
".",
"id",
")",
".",
"update",
"(",
"_order",
"=",
"i",
")",
"# Set the new order for the moved page and its current siblings.",
"for",
"i",
",",
"page_id",
"in",
"enumerate",
"(",
"request",
".",
"POST",
".",
"getlist",
"(",
"'siblings[]'",
")",
")",
":",
"Page",
".",
"objects",
".",
"filter",
"(",
"id",
"=",
"get_id",
"(",
"page_id",
")",
")",
".",
"update",
"(",
"_order",
"=",
"i",
")",
"return",
"HttpResponse",
"(",
"\"ok\"",
")"
] | Updates the ordering of pages via AJAX from within the admin. | [
"Updates",
"the",
"ordering",
"of",
"pages",
"via",
"AJAX",
"from",
"within",
"the",
"admin",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/pages/views.py#L16-L47 |
249,459 | minhhoit/yacms | yacms/pages/views.py | page | def page(request, slug, template=u"pages/page.html", extra_context=None):
"""
Select a template for a page and render it. The request
object should have a ``page`` attribute that's added via
``yacms.pages.middleware.PageMiddleware``. The page is loaded
earlier via middleware to perform various other functions.
The urlpattern that maps to this view is a catch-all pattern, in
which case the page attribute won't exist, so raise a 404 then.
For template selection, a list of possible templates is built up
based on the current page. This list is ordered from most granular
match, starting with a custom template for the exact page, then
adding templates based on the page's parent page, that could be
used for sections of a site (eg all children of the parent).
Finally at the broadest level, a template for the page's content
type (its model class) is checked for, and then if none of these
templates match, the default pages/page.html is used.
"""
from yacms.pages.middleware import PageMiddleware
if not PageMiddleware.installed():
raise ImproperlyConfigured("yacms.pages.middleware.PageMiddleware "
"(or a subclass of it) is missing from " +
"settings.MIDDLEWARE_CLASSES or " +
"settings.MIDDLEWARE")
if not hasattr(request, "page") or request.page.slug != slug:
raise Http404
# Check for a template name matching the page's slug. If the homepage
# is configured as a page instance, the template "pages/index.html" is
# used, since the slug "/" won't match a template name.
template_name = str(slug) if slug != home_slug() else "index"
templates = [u"pages/%s.html" % template_name]
method_template = request.page.get_content_model().get_template_name()
if method_template:
templates.insert(0, method_template)
if request.page.content_model is not None:
templates.append(u"pages/%s/%s.html" % (template_name,
request.page.content_model))
for parent in request.page.get_ascendants(for_user=request.user):
parent_template_name = str(parent.slug)
# Check for a template matching the page's content model.
if request.page.content_model is not None:
templates.append(u"pages/%s/%s.html" % (parent_template_name,
request.page.content_model))
# Check for a template matching the page's content model.
if request.page.content_model is not None:
templates.append(u"pages/%s.html" % request.page.content_model)
templates.append(template)
return TemplateResponse(request, templates, extra_context or {}) | python | def page(request, slug, template=u"pages/page.html", extra_context=None):
"""
Select a template for a page and render it. The request
object should have a ``page`` attribute that's added via
``yacms.pages.middleware.PageMiddleware``. The page is loaded
earlier via middleware to perform various other functions.
The urlpattern that maps to this view is a catch-all pattern, in
which case the page attribute won't exist, so raise a 404 then.
For template selection, a list of possible templates is built up
based on the current page. This list is ordered from most granular
match, starting with a custom template for the exact page, then
adding templates based on the page's parent page, that could be
used for sections of a site (eg all children of the parent).
Finally at the broadest level, a template for the page's content
type (its model class) is checked for, and then if none of these
templates match, the default pages/page.html is used.
"""
from yacms.pages.middleware import PageMiddleware
if not PageMiddleware.installed():
raise ImproperlyConfigured("yacms.pages.middleware.PageMiddleware "
"(or a subclass of it) is missing from " +
"settings.MIDDLEWARE_CLASSES or " +
"settings.MIDDLEWARE")
if not hasattr(request, "page") or request.page.slug != slug:
raise Http404
# Check for a template name matching the page's slug. If the homepage
# is configured as a page instance, the template "pages/index.html" is
# used, since the slug "/" won't match a template name.
template_name = str(slug) if slug != home_slug() else "index"
templates = [u"pages/%s.html" % template_name]
method_template = request.page.get_content_model().get_template_name()
if method_template:
templates.insert(0, method_template)
if request.page.content_model is not None:
templates.append(u"pages/%s/%s.html" % (template_name,
request.page.content_model))
for parent in request.page.get_ascendants(for_user=request.user):
parent_template_name = str(parent.slug)
# Check for a template matching the page's content model.
if request.page.content_model is not None:
templates.append(u"pages/%s/%s.html" % (parent_template_name,
request.page.content_model))
# Check for a template matching the page's content model.
if request.page.content_model is not None:
templates.append(u"pages/%s.html" % request.page.content_model)
templates.append(template)
return TemplateResponse(request, templates, extra_context or {}) | [
"def",
"page",
"(",
"request",
",",
"slug",
",",
"template",
"=",
"u\"pages/page.html\"",
",",
"extra_context",
"=",
"None",
")",
":",
"from",
"yacms",
".",
"pages",
".",
"middleware",
"import",
"PageMiddleware",
"if",
"not",
"PageMiddleware",
".",
"installed",
"(",
")",
":",
"raise",
"ImproperlyConfigured",
"(",
"\"yacms.pages.middleware.PageMiddleware \"",
"\"(or a subclass of it) is missing from \"",
"+",
"\"settings.MIDDLEWARE_CLASSES or \"",
"+",
"\"settings.MIDDLEWARE\"",
")",
"if",
"not",
"hasattr",
"(",
"request",
",",
"\"page\"",
")",
"or",
"request",
".",
"page",
".",
"slug",
"!=",
"slug",
":",
"raise",
"Http404",
"# Check for a template name matching the page's slug. If the homepage",
"# is configured as a page instance, the template \"pages/index.html\" is",
"# used, since the slug \"/\" won't match a template name.",
"template_name",
"=",
"str",
"(",
"slug",
")",
"if",
"slug",
"!=",
"home_slug",
"(",
")",
"else",
"\"index\"",
"templates",
"=",
"[",
"u\"pages/%s.html\"",
"%",
"template_name",
"]",
"method_template",
"=",
"request",
".",
"page",
".",
"get_content_model",
"(",
")",
".",
"get_template_name",
"(",
")",
"if",
"method_template",
":",
"templates",
".",
"insert",
"(",
"0",
",",
"method_template",
")",
"if",
"request",
".",
"page",
".",
"content_model",
"is",
"not",
"None",
":",
"templates",
".",
"append",
"(",
"u\"pages/%s/%s.html\"",
"%",
"(",
"template_name",
",",
"request",
".",
"page",
".",
"content_model",
")",
")",
"for",
"parent",
"in",
"request",
".",
"page",
".",
"get_ascendants",
"(",
"for_user",
"=",
"request",
".",
"user",
")",
":",
"parent_template_name",
"=",
"str",
"(",
"parent",
".",
"slug",
")",
"# Check for a template matching the page's content model.",
"if",
"request",
".",
"page",
".",
"content_model",
"is",
"not",
"None",
":",
"templates",
".",
"append",
"(",
"u\"pages/%s/%s.html\"",
"%",
"(",
"parent_template_name",
",",
"request",
".",
"page",
".",
"content_model",
")",
")",
"# Check for a template matching the page's content model.",
"if",
"request",
".",
"page",
".",
"content_model",
"is",
"not",
"None",
":",
"templates",
".",
"append",
"(",
"u\"pages/%s.html\"",
"%",
"request",
".",
"page",
".",
"content_model",
")",
"templates",
".",
"append",
"(",
"template",
")",
"return",
"TemplateResponse",
"(",
"request",
",",
"templates",
",",
"extra_context",
"or",
"{",
"}",
")"
] | Select a template for a page and render it. The request
object should have a ``page`` attribute that's added via
``yacms.pages.middleware.PageMiddleware``. The page is loaded
earlier via middleware to perform various other functions.
The urlpattern that maps to this view is a catch-all pattern, in
which case the page attribute won't exist, so raise a 404 then.
For template selection, a list of possible templates is built up
based on the current page. This list is ordered from most granular
match, starting with a custom template for the exact page, then
adding templates based on the page's parent page, that could be
used for sections of a site (eg all children of the parent).
Finally at the broadest level, a template for the page's content
type (its model class) is checked for, and then if none of these
templates match, the default pages/page.html is used. | [
"Select",
"a",
"template",
"for",
"a",
"page",
"and",
"render",
"it",
".",
"The",
"request",
"object",
"should",
"have",
"a",
"page",
"attribute",
"that",
"s",
"added",
"via",
"yacms",
".",
"pages",
".",
"middleware",
".",
"PageMiddleware",
".",
"The",
"page",
"is",
"loaded",
"earlier",
"via",
"middleware",
"to",
"perform",
"various",
"other",
"functions",
".",
"The",
"urlpattern",
"that",
"maps",
"to",
"this",
"view",
"is",
"a",
"catch",
"-",
"all",
"pattern",
"in",
"which",
"case",
"the",
"page",
"attribute",
"won",
"t",
"exist",
"so",
"raise",
"a",
"404",
"then",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/pages/views.py#L50-L100 |
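The candidate order built by the record above is easiest to see with a concrete, hypothetical page: a "richtextpage" at slug "about/team" whose only ancestor has slug "about", and no custom get_template_name(). The resulting search order would be:

# Hypothetical template candidates, most granular first:
templates = [
    "pages/about/team.html",
    "pages/about/team/richtextpage.html",
    "pages/about/richtextpage.html",
    "pages/richtextpage.html",
    "pages/page.html",
]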
249,460 | eddiejessup/spatious | spatious/vector.py | vector_unit_nonull | def vector_unit_nonull(v):
"""Return unit vectors.
Any null vectors raise an Exception.
Parameters
----------
v: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
v_new: array, shape of v
"""
if v.size == 0:
return v
return v / vector_mag(v)[..., np.newaxis] | python | def vector_unit_nonull(v):
"""Return unit vectors.
Any null vectors raise an Exception.
Parameters
----------
v: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
v_new: array, shape of v
"""
if v.size == 0:
return v
return v / vector_mag(v)[..., np.newaxis] | [
"def",
"vector_unit_nonull",
"(",
"v",
")",
":",
"if",
"v",
".",
"size",
"==",
"0",
":",
"return",
"v",
"return",
"v",
"/",
"vector_mag",
"(",
"v",
")",
"[",
"...",
",",
"np",
".",
"newaxis",
"]"
] | Return unit vectors.
Any null vectors raise an Exception.
Parameters
----------
v: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
v_new: array, shape of v | [
"Return",
"unit",
"vectors",
".",
"Any",
"null",
"vectors",
"raise",
"an",
"Exception",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/vector.py#L40-L55 |
249,461 | eddiejessup/spatious | spatious/vector.py | vector_unit_nullnull | def vector_unit_nullnull(v):
"""Return unit vectors.
Any null vectors remain null vectors.
Parameters
----------
v: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
v_new: array, shape of v
"""
if v.size == 0:
return v
mag = vector_mag(v)
v_new = v.copy()
v_new[mag > 0.0] /= mag[mag > 0.0][..., np.newaxis]
return v_new | python | def vector_unit_nullnull(v):
"""Return unit vectors.
Any null vectors remain null vectors.
Parameters
----------
v: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
v_new: array, shape of v
"""
if v.size == 0:
return v
mag = vector_mag(v)
v_new = v.copy()
v_new[mag > 0.0] /= mag[mag > 0.0][..., np.newaxis]
return v_new | [
"def",
"vector_unit_nullnull",
"(",
"v",
")",
":",
"if",
"v",
".",
"size",
"==",
"0",
":",
"return",
"v",
"mag",
"=",
"vector_mag",
"(",
"v",
")",
"v_new",
"=",
"v",
".",
"copy",
"(",
")",
"v_new",
"[",
"mag",
">",
"0.0",
"]",
"/=",
"mag",
"[",
"mag",
">",
"0.0",
"]",
"[",
"...",
",",
"np",
".",
"newaxis",
"]",
"return",
"v_new"
] | Return unit vectors.
Any null vectors remain null vectors.
Parameters
----------
v: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
v_new: array, shape of v | [
"Return",
"unit",
"vectors",
".",
"Any",
"null",
"vectors",
"remain",
"null",
"vectors",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/vector.py#L58-L76 |
249,462 | eddiejessup/spatious | spatious/vector.py | vector_unit_nullrand | def vector_unit_nullrand(v, rng=None):
"""Return unit vectors.
Any null vectors are mapped to a uniformly picked unit vector.
Parameters
----------
v: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
v_new: array, shape of v
"""
if v.size == 0:
return v
mag = vector_mag(v)
v_new = v.copy()
v_new[mag == 0.0] = sphere_pick(v.shape[-1], (mag == 0.0).sum(), rng)
v_new[mag > 0.0] /= mag[mag > 0.0][..., np.newaxis]
return v_new | python | def vector_unit_nullrand(v, rng=None):
"""Return unit vectors.
Any null vectors are mapped to a uniformly picked unit vector.
Parameters
----------
v: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
v_new: array, shape of v
"""
if v.size == 0:
return v
mag = vector_mag(v)
v_new = v.copy()
v_new[mag == 0.0] = sphere_pick(v.shape[-1], (mag == 0.0).sum(), rng)
v_new[mag > 0.0] /= mag[mag > 0.0][..., np.newaxis]
return v_new | [
"def",
"vector_unit_nullrand",
"(",
"v",
",",
"rng",
"=",
"None",
")",
":",
"if",
"v",
".",
"size",
"==",
"0",
":",
"return",
"v",
"mag",
"=",
"vector_mag",
"(",
"v",
")",
"v_new",
"=",
"v",
".",
"copy",
"(",
")",
"v_new",
"[",
"mag",
"==",
"0.0",
"]",
"=",
"sphere_pick",
"(",
"v",
".",
"shape",
"[",
"-",
"1",
"]",
",",
"(",
"mag",
"==",
"0.0",
")",
".",
"sum",
"(",
")",
",",
"rng",
")",
"v_new",
"[",
"mag",
">",
"0.0",
"]",
"/=",
"mag",
"[",
"mag",
">",
"0.0",
"]",
"[",
"...",
",",
"np",
".",
"newaxis",
"]",
"return",
"v_new"
] | Return unit vectors.
Any null vectors are mapped to a uniformly picked unit vector.
Parameters
----------
v: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
v_new: array, shape of v | [
"Return",
"unit",
"vectors",
".",
"Any",
"null",
"vectors",
"are",
"mapped",
"to",
"a",
"uniformly",
"picked",
"unit",
"vector",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/vector.py#L79-L98 |
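The three vector_unit_* records above share one normalization and differ only in their null-vector policy. A quick comparison, assuming the functions are in scope (note the nonull variant's docstring promises an exception for null input):

import numpy as np

v = np.array([[3.0, 4.0], [0.0, 0.0]])
vector_unit_nullnull(v)  # [[0.6, 0.8], [0.0, 0.0]] - null stays null
vector_unit_nullrand(v)  # [[0.6, 0.8], <uniform random unit vector>]
vector_unit_nonull(v)    # invalid here: the second row has zero magnitude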
249,463 | eddiejessup/spatious | spatious/vector.py | polar_to_cart | def polar_to_cart(arr_p):
"""Return polar vectors in their cartesian representation.
Parameters
----------
arr_p: array, shape (a1, a2, ..., d)
Polar vectors, with last axis indexing the dimension,
using (radius, inclination, azimuth) convention.
Returns
-------
arr_c: array, shape of arr_p
Cartesian vectors.
"""
if arr_p.shape[-1] == 1:
arr_c = arr_p.copy()
elif arr_p.shape[-1] == 2:
arr_c = np.empty_like(arr_p)
arr_c[..., 0] = arr_p[..., 0] * np.cos(arr_p[..., 1])
arr_c[..., 1] = arr_p[..., 0] * np.sin(arr_p[..., 1])
elif arr_p.shape[-1] == 3:
arr_c = np.empty_like(arr_p)
arr_c[..., 0] = arr_p[..., 0] * np.sin(
arr_p[..., 1]) * np.cos(arr_p[..., 2])
arr_c[..., 1] = arr_p[..., 0] * np.sin(
arr_p[..., 1]) * np.sin(arr_p[..., 2])
arr_c[..., 2] = arr_p[..., 0] * np.cos(arr_p[..., 1])
else:
raise Exception('Invalid vector for polar representation')
return arr_c | python | def polar_to_cart(arr_p):
"""Return polar vectors in their cartesian representation.
Parameters
----------
arr_p: array, shape (a1, a2, ..., d)
Polar vectors, with last axis indexing the dimension,
using (radius, inclination, azimuth) convention.
Returns
-------
arr_c: array, shape of arr_p
Cartesian vectors.
"""
if arr_p.shape[-1] == 1:
arr_c = arr_p.copy()
elif arr_p.shape[-1] == 2:
arr_c = np.empty_like(arr_p)
arr_c[..., 0] = arr_p[..., 0] * np.cos(arr_p[..., 1])
arr_c[..., 1] = arr_p[..., 0] * np.sin(arr_p[..., 1])
elif arr_p.shape[-1] == 3:
arr_c = np.empty_like(arr_p)
arr_c[..., 0] = arr_p[..., 0] * np.sin(
arr_p[..., 1]) * np.cos(arr_p[..., 2])
arr_c[..., 1] = arr_p[..., 0] * np.sin(
arr_p[..., 1]) * np.sin(arr_p[..., 2])
arr_c[..., 2] = arr_p[..., 0] * np.cos(arr_p[..., 1])
else:
raise Exception('Invalid vector for polar representation')
return arr_c | [
"def",
"polar_to_cart",
"(",
"arr_p",
")",
":",
"if",
"arr_p",
".",
"shape",
"[",
"-",
"1",
"]",
"==",
"1",
":",
"arr_c",
"=",
"arr_p",
".",
"copy",
"(",
")",
"elif",
"arr_p",
".",
"shape",
"[",
"-",
"1",
"]",
"==",
"2",
":",
"arr_c",
"=",
"np",
".",
"empty_like",
"(",
"arr_p",
")",
"arr_c",
"[",
"...",
",",
"0",
"]",
"=",
"arr_p",
"[",
"...",
",",
"0",
"]",
"*",
"np",
".",
"cos",
"(",
"arr_p",
"[",
"...",
",",
"1",
"]",
")",
"arr_c",
"[",
"...",
",",
"1",
"]",
"=",
"arr_p",
"[",
"...",
",",
"0",
"]",
"*",
"np",
".",
"sin",
"(",
"arr_p",
"[",
"...",
",",
"1",
"]",
")",
"elif",
"arr_p",
".",
"shape",
"[",
"-",
"1",
"]",
"==",
"3",
":",
"arr_c",
"=",
"np",
".",
"empty_like",
"(",
"arr_p",
")",
"arr_c",
"[",
"...",
",",
"0",
"]",
"=",
"arr_p",
"[",
"...",
",",
"0",
"]",
"*",
"np",
".",
"sin",
"(",
"arr_p",
"[",
"...",
",",
"1",
"]",
")",
"*",
"np",
".",
"cos",
"(",
"arr_p",
"[",
"...",
",",
"2",
"]",
")",
"arr_c",
"[",
"...",
",",
"1",
"]",
"=",
"arr_p",
"[",
"...",
",",
"0",
"]",
"*",
"np",
".",
"sin",
"(",
"arr_p",
"[",
"...",
",",
"1",
"]",
")",
"*",
"np",
".",
"sin",
"(",
"arr_p",
"[",
"...",
",",
"2",
"]",
")",
"arr_c",
"[",
"...",
",",
"2",
"]",
"=",
"arr_p",
"[",
"...",
",",
"0",
"]",
"*",
"np",
".",
"cos",
"(",
"arr_p",
"[",
"...",
",",
"1",
"]",
")",
"else",
":",
"raise",
"Exception",
"(",
"'Invalid vector for polar representation'",
")",
"return",
"arr_c"
] | Return polar vectors in their cartesian representation.
Parameters
----------
arr_p: array, shape (a1, a2, ..., d)
Polar vectors, with last axis indexing the dimension,
using (radius, inclination, azimuth) convention.
Returns
-------
arr_c: array, shape of arr_p
Cartesian vectors. | [
"Return",
"polar",
"vectors",
"in",
"their",
"cartesian",
"representation",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/vector.py#L124-L153 |
249,464 | eddiejessup/spatious | spatious/vector.py | cart_to_polar | def cart_to_polar(arr_c):
"""Return cartesian vectors in their polar representation.
Parameters
----------
arr_c: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
arr_p: array, shape of arr_c
Polar vectors, using (radius, inclination, azimuth) convention.
"""
if arr_c.shape[-1] == 1:
arr_p = arr_c.copy()
elif arr_c.shape[-1] == 2:
arr_p = np.empty_like(arr_c)
arr_p[..., 0] = vector_mag(arr_c)
arr_p[..., 1] = np.arctan2(arr_c[..., 1], arr_c[..., 0])
elif arr_c.shape[-1] == 3:
arr_p = np.empty_like(arr_c)
arr_p[..., 0] = vector_mag(arr_c)
arr_p[..., 1] = np.arccos(arr_c[..., 2] / arr_p[..., 0])
arr_p[..., 2] = np.arctan2(arr_c[..., 1], arr_c[..., 0])
else:
raise Exception('Invalid vector for polar representation')
return arr_p | python | def cart_to_polar(arr_c):
"""Return cartesian vectors in their polar representation.
Parameters
----------
arr_c: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
arr_p: array, shape of arr_c
Polar vectors, using (radius, inclination, azimuth) convention.
"""
if arr_c.shape[-1] == 1:
arr_p = arr_c.copy()
elif arr_c.shape[-1] == 2:
arr_p = np.empty_like(arr_c)
arr_p[..., 0] = vector_mag(arr_c)
arr_p[..., 1] = np.arctan2(arr_c[..., 1], arr_c[..., 0])
elif arr_c.shape[-1] == 3:
arr_p = np.empty_like(arr_c)
arr_p[..., 0] = vector_mag(arr_c)
arr_p[..., 1] = np.arccos(arr_c[..., 2] / arr_p[..., 0])
arr_p[..., 2] = np.arctan2(arr_c[..., 1], arr_c[..., 0])
else:
raise Exception('Invalid vector for polar representation')
return arr_p | [
"def",
"cart_to_polar",
"(",
"arr_c",
")",
":",
"if",
"arr_c",
".",
"shape",
"[",
"-",
"1",
"]",
"==",
"1",
":",
"arr_p",
"=",
"arr_c",
".",
"copy",
"(",
")",
"elif",
"arr_c",
".",
"shape",
"[",
"-",
"1",
"]",
"==",
"2",
":",
"arr_p",
"=",
"np",
".",
"empty_like",
"(",
"arr_c",
")",
"arr_p",
"[",
"...",
",",
"0",
"]",
"=",
"vector_mag",
"(",
"arr_c",
")",
"arr_p",
"[",
"...",
",",
"1",
"]",
"=",
"np",
".",
"arctan2",
"(",
"arr_c",
"[",
"...",
",",
"1",
"]",
",",
"arr_c",
"[",
"...",
",",
"0",
"]",
")",
"elif",
"arr_c",
".",
"shape",
"[",
"-",
"1",
"]",
"==",
"3",
":",
"arr_p",
"=",
"np",
".",
"empty_like",
"(",
"arr_c",
")",
"arr_p",
"[",
"...",
",",
"0",
"]",
"=",
"vector_mag",
"(",
"arr_c",
")",
"arr_p",
"[",
"...",
",",
"1",
"]",
"=",
"np",
".",
"arccos",
"(",
"arr_c",
"[",
"...",
",",
"2",
"]",
"/",
"arr_p",
"[",
"...",
",",
"0",
"]",
")",
"arr_p",
"[",
"...",
",",
"2",
"]",
"=",
"np",
".",
"arctan2",
"(",
"arr_c",
"[",
"...",
",",
"1",
"]",
",",
"arr_c",
"[",
"...",
",",
"0",
"]",
")",
"else",
":",
"raise",
"Exception",
"(",
"'Invalid vector for polar representation'",
")",
"return",
"arr_p"
] | Return cartesian vectors in their polar representation.
Parameters
----------
arr_c: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
arr_p: array, shape of arr_c
Polar vectors, using (radius, inclination, azimuth) convention. | [
"Return",
"cartesian",
"vectors",
"in",
"their",
"polar",
"representation",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/vector.py#L156-L182 |
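The two conversion records above are inverses; a round-trip sanity check in 3-D (assumes the module's helpers such as vector_mag are in scope):

import numpy as np

r_c = np.array([[1.0, 1.0, 1.0]])
r_p = cart_to_polar(r_c)  # [[sqrt(3), arccos(1/sqrt(3)), pi/4]]
assert np.allclose(polar_to_cart(r_p), r_c)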
249,465 | eddiejessup/spatious | spatious/vector.py | sphere_pick_polar | def sphere_pick_polar(d, n=1, rng=None):
"""Return vectors uniformly picked on the unit sphere.
Vectors are in a polar representation.
Parameters
----------
d: integer
The number of dimensions of the space in which the sphere lives.
n: integer
Number of samples to pick.
Returns
-------
r: array, shape (n, d)
Sample vectors.
"""
if rng is None:
rng = np.random
a = np.empty([n, d])
if d == 1:
a[:, 0] = rng.randint(2, size=n) * 2 - 1
elif d == 2:
a[:, 0] = 1.0
a[:, 1] = rng.uniform(-np.pi, +np.pi, n)
elif d == 3:
u, v = rng.uniform(0.0, 1.0, (2, n))
a[:, 0] = 1.0
a[:, 1] = np.arccos(2.0 * v - 1.0)
a[:, 2] = 2.0 * np.pi * u
else:
raise Exception('Invalid vector for polar representation')
return a | python | def sphere_pick_polar(d, n=1, rng=None):
"""Return vectors uniformly picked on the unit sphere.
Vectors are in a polar representation.
Parameters
----------
d: integer
The number of dimensions of the space in which the sphere lives.
n: integer
Number of samples to pick.
Returns
-------
r: array, shape (n, d)
Sample vectors.
"""
if rng is None:
rng = np.random
a = np.empty([n, d])
if d == 1:
a[:, 0] = rng.randint(2, size=n) * 2 - 1
elif d == 2:
a[:, 0] = 1.0
a[:, 1] = rng.uniform(-np.pi, +np.pi, n)
elif d == 3:
u, v = rng.uniform(0.0, 1.0, (2, n))
a[:, 0] = 1.0
a[:, 1] = np.arccos(2.0 * v - 1.0)
a[:, 2] = 2.0 * np.pi * u
else:
raise Exception('Invalid vector for polar representation')
return a | [
"def",
"sphere_pick_polar",
"(",
"d",
",",
"n",
"=",
"1",
",",
"rng",
"=",
"None",
")",
":",
"if",
"rng",
"is",
"None",
":",
"rng",
"=",
"np",
".",
"random",
"a",
"=",
"np",
".",
"empty",
"(",
"[",
"n",
",",
"d",
"]",
")",
"if",
"d",
"==",
"1",
":",
"a",
"[",
":",
",",
"0",
"]",
"=",
"rng",
".",
"randint",
"(",
"2",
",",
"size",
"=",
"n",
")",
"*",
"2",
"-",
"1",
"elif",
"d",
"==",
"2",
":",
"a",
"[",
":",
",",
"0",
"]",
"=",
"1.0",
"a",
"[",
":",
",",
"1",
"]",
"=",
"rng",
".",
"uniform",
"(",
"-",
"np",
".",
"pi",
",",
"+",
"np",
".",
"pi",
",",
"n",
")",
"elif",
"d",
"==",
"3",
":",
"u",
",",
"v",
"=",
"rng",
".",
"uniform",
"(",
"0.0",
",",
"1.0",
",",
"(",
"2",
",",
"n",
")",
")",
"a",
"[",
":",
",",
"0",
"]",
"=",
"1.0",
"a",
"[",
":",
",",
"1",
"]",
"=",
"np",
".",
"arccos",
"(",
"2.0",
"*",
"v",
"-",
"1.0",
")",
"a",
"[",
":",
",",
"2",
"]",
"=",
"2.0",
"*",
"np",
".",
"pi",
"*",
"u",
"else",
":",
"raise",
"Exception",
"(",
"'Invalid vector for polar representation'",
")",
"return",
"a"
] | Return vectors uniformly picked on the unit sphere.
Vectors are in a polar representation.
Parameters
----------
d: integer
The number of dimensions of the space in which the sphere lives.
n: integer
Number of samples to pick.
Returns
-------
r: array, shape (n, d)
Sample vectors. | [
"Return",
"vectors",
"uniformly",
"picked",
"on",
"the",
"unit",
"sphere",
".",
"Vectors",
"are",
"in",
"a",
"polar",
"representation",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/vector.py#L185-L216 |
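The 3-D branch above is inverse-CDF sampling: uniform area on the sphere means cos(inclination) must be uniform on [-1, 1], hence inclination = arccos(2v - 1). An empirical check of that property:

import numpy as np

a = sphere_pick_polar(3, 100000, np.random.RandomState(0))
# cos(inclination) should be uniform on [-1, 1], so its mean is near 0.
assert abs(np.cos(a[:, 1]).mean()) < 0.01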
249,466 | eddiejessup/spatious | spatious/vector.py | rejection_pick | def rejection_pick(L, n, d, valid, rng=None):
"""Return cartesian vectors uniformly picked in a space with an arbitrary
number of dimensions, which is fully enclosed by a cube of finite length,
using a supplied function which should evaluate whether a picked point lies
within this space.
The picking is done by rejection sampling in the cube.
Parameters
----------
L: float
Side length of the enclosing cube.
n: integer
Number of points to return
d: integer
The number of dimensions of the space
Returns
-------
r: array, shape (n, d)
Sample cartesian vectors
"""
if rng is None:
rng = np.random
rs = []
while len(rs) < n:
r = rng.uniform(-L / 2.0, L / 2.0, size=d)
if valid(r):
rs.append(r)
return np.array(rs) | python | def rejection_pick(L, n, d, valid, rng=None):
"""Return cartesian vectors uniformly picked in a space with an arbitrary
number of dimensions, which is fully enclosed by a cube of finite length,
using a supplied function which should evaluate whether a picked point lies
within this space.
The picking is done by rejection sampling in the cube.
Parameters
----------
L: float
Side length of the enclosing cube.
n: integer
Number of points to return
d: integer
The number of dimensions of the space
Returns
-------
r: array, shape (n, d)
Sample cartesian vectors
"""
if rng is None:
rng = np.random
rs = []
while len(rs) < n:
r = rng.uniform(-L / 2.0, L / 2.0, size=d)
if valid(r):
rs.append(r)
return np.array(rs) | [
"def",
"rejection_pick",
"(",
"L",
",",
"n",
",",
"d",
",",
"valid",
",",
"rng",
"=",
"None",
")",
":",
"if",
"rng",
"is",
"None",
":",
"rng",
"=",
"np",
".",
"random",
"rs",
"=",
"[",
"]",
"while",
"len",
"(",
"rs",
")",
"<",
"n",
":",
"r",
"=",
"rng",
".",
"uniform",
"(",
"-",
"L",
"/",
"2.0",
",",
"L",
"/",
"2.0",
",",
"size",
"=",
"d",
")",
"if",
"valid",
"(",
"r",
")",
":",
"rs",
".",
"append",
"(",
"r",
")",
"return",
"np",
".",
"array",
"(",
"rs",
")"
] | Return cartesian vectors uniformly picked in a space with an arbitrary
number of dimensions, which is fully enclosed by a cube of finite length,
using a supplied function which should evaluate whether a picked point lies
within this space.
The picking is done by rejection sampling in the cube.
Parameters
----------
L: float
Side length of the enclosing cube.
n: integer
Number of points to return
d: integer
The number of dimensions of the space
Returns
-------
r: array, shape (n, d)
Sample cartesian vectors | [
"Return",
"cartesian",
"vectors",
"uniformly",
"picked",
"in",
"a",
"space",
"with",
"an",
"arbitrary",
"number",
"of",
"dimensions",
"which",
"is",
"fully",
"enclosed",
"by",
"a",
"cube",
"of",
"finite",
"length",
"using",
"a",
"supplied",
"function",
"which",
"should",
"evaluate",
"whether",
"a",
"picked",
"point",
"lies",
"within",
"this",
"space",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/vector.py#L238-L267 |
249,467 | eddiejessup/spatious | spatious/vector.py | ball_pick | def ball_pick(n, d, rng=None):
"""Return cartesian vectors uniformly picked on the unit ball in an
arbitrary number of dimensions.
The unit ball is the space enclosed by the unit sphere.
The picking is done by rejection sampling in the unit cube.
In 3-dimensional space, the fraction `\pi / 6 \sim 0.52` of points is valid.
Parameters
----------
n: integer
Number of points to return.
d: integer
Number of dimensions of the space in which the ball lives
Returns
-------
r: array, shape (n, d)
Sample cartesian vectors.
"""
def valid(r):
return vector_mag_sq(r) < 1.0
return rejection_pick(L=2.0, n=n, d=d, valid=valid, rng=rng) | python | def ball_pick(n, d, rng=None):
"""Return cartesian vectors uniformly picked on the unit ball in an
arbitrary number of dimensions.
The unit ball is the space enclosed by the unit sphere.
The picking is done by rejection sampling in the unit cube.
In 3-dimensional space, the fraction `\pi / 6 \sim 0.52` of points is valid.
Parameters
----------
n: integer
Number of points to return.
d: integer
Number of dimensions of the space in which the ball lives
Returns
-------
r: array, shape (n, d)
Sample cartesian vectors.
"""
def valid(r):
return vector_mag_sq(r) < 1.0
return rejection_pick(L=2.0, n=n, d=d, valid=valid, rng=rng) | [
"def",
"ball_pick",
"(",
"n",
",",
"d",
",",
"rng",
"=",
"None",
")",
":",
"def",
"valid",
"(",
"r",
")",
":",
"return",
"vector_mag_sq",
"(",
"r",
")",
"<",
"1.0",
"return",
"rejection_pick",
"(",
"L",
"=",
"2.0",
",",
"n",
"=",
"n",
",",
"d",
"=",
"d",
",",
"valid",
"=",
"valid",
",",
"rng",
"=",
"rng",
")"
] | Return cartesian vectors uniformly picked on the unit ball in an
arbitrary number of dimensions.
The unit ball is the space enclosed by the unit sphere.
The picking is done by rejection sampling in the unit cube.
In 3-dimensional space, the fraction `\pi / 6 \sim 0.52` of points is valid.
Parameters
----------
n: integer
Number of points to return.
d: integer
Number of dimensions of the space in which the ball lives
Returns
-------
r: array, shape (n, d)
Sample cartesian vectors. | [
"Return",
"cartesian",
"vectors",
"uniformly",
"picked",
"on",
"the",
"unit",
"ball",
"in",
"an",
"arbitrary",
"number",
"of",
"dimensions",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/vector.py#L270-L294 |
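The cost of the rejection step above is set by the ball-to-cube volume ratio, which in 3-D is pi/6, matching the ~0.52 acceptance quoted in the docstring. A direct, standalone estimate:

import numpy as np

rng = np.random.RandomState(0)
r = rng.uniform(-1.0, 1.0, size=(100000, 3))
rate = ((r ** 2).sum(axis=1) < 1.0).mean()
print(rate, np.pi / 6)  # both ~0.524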
249,468 | eddiejessup/spatious | spatious/vector.py | disk_pick_polar | def disk_pick_polar(n=1, rng=None):
"""Return vectors uniformly picked on the unit disk.
The unit disk is the space enclosed by the unit circle.
Vectors are in a polar representation.
Parameters
----------
n: integer
Number of points to return.
Returns
-------
r: array, shape (n, 2)
Sample vectors.
"""
if rng is None:
rng = np.random
a = np.zeros([n, 2], dtype=np.float)
a[:, 0] = np.sqrt(rng.uniform(size=n))
a[:, 1] = rng.uniform(0.0, 2.0 * np.pi, size=n)
return a | python | def disk_pick_polar(n=1, rng=None):
"""Return vectors uniformly picked on the unit disk.
The unit disk is the space enclosed by the unit circle.
Vectors are in a polar representation.
Parameters
----------
n: integer
Number of points to return.
Returns
-------
r: array, shape (n, 2)
Sample vectors.
"""
if rng is None:
rng = np.random
a = np.zeros([n, 2], dtype=np.float)
a[:, 0] = np.sqrt(rng.uniform(size=n))
a[:, 1] = rng.uniform(0.0, 2.0 * np.pi, size=n)
return a | [
"def",
"disk_pick_polar",
"(",
"n",
"=",
"1",
",",
"rng",
"=",
"None",
")",
":",
"if",
"rng",
"is",
"None",
":",
"rng",
"=",
"np",
".",
"random",
"a",
"=",
"np",
".",
"zeros",
"(",
"[",
"n",
",",
"2",
"]",
",",
"dtype",
"=",
"np",
".",
"float",
")",
"a",
"[",
":",
",",
"0",
"]",
"=",
"np",
".",
"sqrt",
"(",
"rng",
".",
"uniform",
"(",
"size",
"=",
"n",
")",
")",
"a",
"[",
":",
",",
"1",
"]",
"=",
"rng",
".",
"uniform",
"(",
"0.0",
",",
"2.0",
"*",
"np",
".",
"pi",
",",
"size",
"=",
"n",
")",
"return",
"a"
] | Return vectors uniformly picked on the unit disk.
The unit disk is the space enclosed by the unit circle.
Vectors are in a polar representation.
Parameters
----------
n: integer
Number of points to return.
Returns
-------
r: array, shape (n, 2)
Sample vectors. | [
"Return",
"vectors",
"uniformly",
"picked",
"on",
"the",
"unit",
"disk",
".",
"The",
"unit",
"disk",
"is",
"the",
"space",
"enclosed",
"by",
"the",
"unit",
"circle",
".",
"Vectors",
"are",
"in",
"a",
"polar",
"representation",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/vector.py#L297-L317 |
249,469 | eddiejessup/spatious | spatious/vector.py | smallest_signed_angle | def smallest_signed_angle(source, target):
"""Find the smallest angle going from angle `source` to angle `target`."""
dth = target - source
dth = (dth + np.pi) % (2.0 * np.pi) - np.pi
return dth | python | def smallest_signed_angle(source, target):
"""Find the smallest angle going from angle `source` to angle `target`."""
dth = target - source
dth = (dth + np.pi) % (2.0 * np.pi) - np.pi
return dth | [
"def",
"smallest_signed_angle",
"(",
"source",
",",
"target",
")",
":",
"dth",
"=",
"target",
"-",
"source",
"dth",
"=",
"(",
"dth",
"+",
"np",
".",
"pi",
")",
"%",
"(",
"2.0",
"*",
"np",
".",
"pi",
")",
"-",
"np",
".",
"pi",
"return",
"dth"
] | Find the smallest angle going from angle `source` to angle `target`. | [
"Find",
"the",
"smallest",
"angle",
"going",
"from",
"angle",
"source",
"to",
"angle",
"target",
"."
] | b7ae91bec029e85a45a7f303ee184076433723cd | https://github.com/eddiejessup/spatious/blob/b7ae91bec029e85a45a7f303ee184076433723cd/spatious/vector.py#L343-L347 |
249,470 | cbrand/vpnchooser | src/vpnchooser/resources/vpn.py | VpnResource.put | def put(self, vpn_id: int) -> Vpn:
"""
Updates the Vpn Resource with the
name.
"""
vpn = self._get_or_abort(vpn_id)
self.update(vpn)
session.commit()
return vpn | python | def put(self, vpn_id: int) -> Vpn:
"""
Updates the Vpn Resource with the
name.
"""
vpn = self._get_or_abort(vpn_id)
self.update(vpn)
session.commit()
return vpn | [
"def",
"put",
"(",
"self",
",",
"vpn_id",
":",
"int",
")",
"->",
"Vpn",
":",
"vpn",
"=",
"self",
".",
"_get_or_abort",
"(",
"vpn_id",
")",
"self",
".",
"update",
"(",
"vpn",
")",
"session",
".",
"commit",
"(",
")",
"return",
"vpn"
] | Updates the Vpn Resource with the
name. | [
"Updates",
"the",
"Vpn",
"Resource",
"with",
"the",
"name",
"."
] | d153e3d05555c23cf5e8e15e507eecad86465923 | https://github.com/cbrand/vpnchooser/blob/d153e3d05555c23cf5e8e15e507eecad86465923/src/vpnchooser/resources/vpn.py#L88-L96 |
249,471 | cbrand/vpnchooser | src/vpnchooser/resources/vpn.py | VpnListResource.post | def post(self) -> Vpn:
"""
Creates the vpn with the given data.
"""
vpn = Vpn()
session.add(vpn)
self.update(vpn)
session.flush()
session.commit()
return vpn, 201, {
'Location': url_for('vpn', vpn_id=vpn.id)
} | python | def post(self) -> Vpn:
"""
Creates the vpn with the given data.
"""
vpn = Vpn()
session.add(vpn)
self.update(vpn)
session.flush()
session.commit()
return vpn, 201, {
'Location': url_for('vpn', vpn_id=vpn.id)
} | [
"def",
"post",
"(",
"self",
")",
"->",
"Vpn",
":",
"vpn",
"=",
"Vpn",
"(",
")",
"session",
".",
"add",
"(",
"vpn",
")",
"self",
".",
"update",
"(",
"vpn",
")",
"session",
".",
"flush",
"(",
")",
"session",
".",
"commit",
"(",
")",
"return",
"vpn",
",",
"201",
",",
"{",
"'Location'",
":",
"url_for",
"(",
"'vpn'",
",",
"vpn_id",
"=",
"vpn",
".",
"id",
")",
"}"
] | Creates the vpn with the given data. | [
"Creates",
"the",
"vpn",
"with",
"the",
"given",
"data",
"."
] | d153e3d05555c23cf5e8e15e507eecad86465923 | https://github.com/cbrand/vpnchooser/blob/d153e3d05555c23cf5e8e15e507eecad86465923/src/vpnchooser/resources/vpn.py#L121-L132 |
249,472 | tBaxter/tango-comments | build/lib/tango_comments/templatetags/comments.py | RenderCommentListNode.handle_token | def handle_token(cls, parser, token):
"""Class method to parse render_comment_list and return a Node."""
tokens = token.contents.split()
if tokens[1] != 'for':
raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % tokens[0])
# {% render_comment_list for obj %}
if len(tokens) == 3:
return cls(object_expr=parser.compile_filter(tokens[2]))
# {% render_comment_list for app.models pk %}
elif len(tokens) == 4:
return cls(
ctype = BaseCommentNode.lookup_content_type(tokens[2], tokens[0]),
object_pk_expr = parser.compile_filter(tokens[3])
) | python | def handle_token(cls, parser, token):
"""Class method to parse render_comment_list and return a Node."""
tokens = token.contents.split()
if tokens[1] != 'for':
raise template.TemplateSyntaxError("Second argument in %r tag must be 'for'" % tokens[0])
# {% render_comment_list for obj %}
if len(tokens) == 3:
return cls(object_expr=parser.compile_filter(tokens[2]))
# {% render_comment_list for app.models pk %}
elif len(tokens) == 4:
return cls(
ctype = BaseCommentNode.lookup_content_type(tokens[2], tokens[0]),
object_pk_expr = parser.compile_filter(tokens[3])
) | [
"def",
"handle_token",
"(",
"cls",
",",
"parser",
",",
"token",
")",
":",
"tokens",
"=",
"token",
".",
"contents",
".",
"split",
"(",
")",
"if",
"tokens",
"[",
"1",
"]",
"!=",
"'for'",
":",
"raise",
"template",
".",
"TemplateSyntaxError",
"(",
"\"Second argument in %r tag must be 'for'\"",
"%",
"tokens",
"[",
"0",
"]",
")",
"# {% render_comment_list for obj %}",
"if",
"len",
"(",
"tokens",
")",
"==",
"3",
":",
"return",
"cls",
"(",
"object_expr",
"=",
"parser",
".",
"compile_filter",
"(",
"tokens",
"[",
"2",
"]",
")",
")",
"# {% render_comment_list for app.models pk %}",
"elif",
"len",
"(",
"tokens",
")",
"==",
"4",
":",
"return",
"cls",
"(",
"ctype",
"=",
"BaseCommentNode",
".",
"lookup_content_type",
"(",
"tokens",
"[",
"2",
"]",
",",
"tokens",
"[",
"0",
"]",
")",
",",
"object_pk_expr",
"=",
"parser",
".",
"compile_filter",
"(",
"tokens",
"[",
"3",
"]",
")",
")"
] | Class method to parse render_comment_list and return a Node. | [
"Class",
"method",
"to",
"parse",
"render_comment_list",
"and",
"return",
"a",
"Node",
"."
] | 1fd335c6fc9e81bba158e42e1483f1a149622ab4 | https://github.com/tBaxter/tango-comments/blob/1fd335c6fc9e81bba158e42e1483f1a149622ab4/build/lib/tango_comments/templatetags/comments.py#L188-L203 |
249,473 | andrewramsay/sk8-drivers | pysk8/calibration/sk8_calibration_gui.py | SK8Calibration.get_current_data | def get_current_data(self):
"""Return the calibration data for the current IMU, if any."""
if self.current_imuid in self.calibration_data:
return self.calibration_data[self.current_imuid]
return {} | python | def get_current_data(self):
"""Return the calibration data for the current IMU, if any."""
if self.current_imuid in self.calibration_data:
return self.calibration_data[self.current_imuid]
return {} | [
"def",
"get_current_data",
"(",
"self",
")",
":",
"if",
"self",
".",
"current_imuid",
"in",
"self",
".",
"calibration_data",
":",
"return",
"self",
".",
"calibration_data",
"[",
"self",
".",
"current_imuid",
"]",
"return",
"{",
"}"
] | Return the calibration data for the current IMU, if any. | [
"Return",
"the",
"calibration",
"data",
"for",
"the",
"current",
"IMU",
"if",
"any",
"."
] | 67347a71762fb421f5ae65a595def5c7879e8b0c | https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/calibration/sk8_calibration_gui.py#L271-L276 |
249,474 | andrewramsay/sk8-drivers | pysk8/calibration/sk8_calibration_gui.py | SK8Calibration.update_battery | def update_battery(self):
"""Updates the battery level in the UI for the connected SK8, if any"""
if self.sk8 is None:
return
battery = self.sk8.get_battery_level()
self.lblBattery.setText('Battery: {}%'.format(battery)) | python | def update_battery(self):
"""Updates the battery level in the UI for the connected SK8, if any"""
if self.sk8 is None:
return
battery = self.sk8.get_battery_level()
self.lblBattery.setText('Battery: {}%'.format(battery)) | [
"def",
"update_battery",
"(",
"self",
")",
":",
"if",
"self",
".",
"sk8",
"is",
"None",
":",
"return",
"battery",
"=",
"self",
".",
"sk8",
".",
"get_battery_level",
"(",
")",
"self",
".",
"lblBattery",
".",
"setText",
"(",
"'Battery: {}%'",
".",
"format",
"(",
"battery",
")",
")"
] | Updates the battery level in the UI for the connected SK8, if any | [
"Updates",
"the",
"battery",
"level",
"in",
"the",
"UI",
"for",
"the",
"connected",
"SK8",
"if",
"any"
] | 67347a71762fb421f5ae65a595def5c7879e8b0c | https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/calibration/sk8_calibration_gui.py#L278-L283 |
249,475 | andrewramsay/sk8-drivers | pysk8/calibration/sk8_calibration_gui.py | SK8Calibration.imu_changed | def imu_changed(self, val):
"""Handle clicks on the IMU index spinner."""
self.current_imuid = '{}_IMU{}'.format(self.sk8.get_device_name(), val)
self.update_data_display(self.get_current_data()) | python | def imu_changed(self, val):
"""Handle clicks on the IMU index spinner."""
self.current_imuid = '{}_IMU{}'.format(self.sk8.get_device_name(), val)
self.update_data_display(self.get_current_data()) | [
"def",
"imu_changed",
"(",
"self",
",",
"val",
")",
":",
"self",
".",
"current_imuid",
"=",
"'{}_IMU{}'",
".",
"format",
"(",
"self",
".",
"sk8",
".",
"get_device_name",
"(",
")",
",",
"val",
")",
"self",
".",
"update_data_display",
"(",
"self",
".",
"get_current_data",
"(",
")",
")"
] | Handle clicks on the IMU index spinner. | [
"Handle",
"clicks",
"on",
"the",
"IMU",
"index",
"spinner",
"."
] | 67347a71762fb421f5ae65a595def5c7879e8b0c | https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/calibration/sk8_calibration_gui.py#L285-L288 |
249,476 | andrewramsay/sk8-drivers | pysk8/calibration/sk8_calibration_gui.py | SK8Calibration.accel_calibration | def accel_calibration(self):
"""Perform accelerometer calibration for current IMU."""
self.calibration_state = self.CAL_ACC
self.acc_dialog = SK8AccDialog(self.sk8.get_imu(self.spinIMU.value()), self)
if self.acc_dialog.exec_() == QDialog.Rejected:
return
self.calculate_acc_calibration(self.acc_dialog.samples) | python | def accel_calibration(self):
"""Perform accelerometer calibration for current IMU."""
self.calibration_state = self.CAL_ACC
self.acc_dialog = SK8AccDialog(self.sk8.get_imu(self.spinIMU.value()), self)
if self.acc_dialog.exec_() == QDialog.Rejected:
return
self.calculate_acc_calibration(self.acc_dialog.samples) | [
"def",
"accel_calibration",
"(",
"self",
")",
":",
"self",
".",
"calibration_state",
"=",
"self",
".",
"CAL_ACC",
"self",
".",
"acc_dialog",
"=",
"SK8AccDialog",
"(",
"self",
".",
"sk8",
".",
"get_imu",
"(",
"self",
".",
"spinIMU",
".",
"value",
"(",
")",
")",
",",
"self",
")",
"if",
"self",
".",
"acc_dialog",
".",
"exec_",
"(",
")",
"==",
"QDialog",
".",
"Rejected",
":",
"return",
"self",
".",
"calculate_acc_calibration",
"(",
"self",
".",
"acc_dialog",
".",
"samples",
")"
] | Perform accelerometer calibration for current IMU. | [
"Perform",
"accelerometer",
"calibration",
"for",
"current",
"IMU",
"."
] | 67347a71762fb421f5ae65a595def5c7879e8b0c | https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/calibration/sk8_calibration_gui.py#L290-L297 |
249,477 | andrewramsay/sk8-drivers | pysk8/calibration/sk8_calibration_gui.py | SK8Calibration.gyro_calibration | def gyro_calibration(self):
"""Perform gyroscope calibration for current IMU."""
QtWidgets.QMessageBox.information(self, 'Gyro calibration', 'Ensure the selected IMU is in a stable, unmoving position, then click OK. Don\'t move the the IMU for a few seconds')
self.calibration_state = self.CAL_GYRO
self.gyro_dialog = SK8GyroDialog(self.sk8.get_imu(self.spinIMU.value()), self)
if self.gyro_dialog.exec_() == QDialog.Rejected:
return
self.calculate_gyro_calibration(self.gyro_dialog.samples) | python | def gyro_calibration(self):
"""Perform gyroscope calibration for current IMU."""
QtWidgets.QMessageBox.information(self, 'Gyro calibration', 'Ensure the selected IMU is in a stable, unmoving position, then click OK. Don\'t move the the IMU for a few seconds')
self.calibration_state = self.CAL_GYRO
self.gyro_dialog = SK8GyroDialog(self.sk8.get_imu(self.spinIMU.value()), self)
if self.gyro_dialog.exec_() == QDialog.Rejected:
return
self.calculate_gyro_calibration(self.gyro_dialog.samples) | [
"def",
"gyro_calibration",
"(",
"self",
")",
":",
"QtWidgets",
".",
"QMessageBox",
".",
"information",
"(",
"self",
",",
"'Gyro calibration'",
",",
"'Ensure the selected IMU is in a stable, unmoving position, then click OK. Don\\'t move the the IMU for a few seconds'",
")",
"self",
".",
"calibration_state",
"=",
"self",
".",
"CAL_GYRO",
"self",
".",
"gyro_dialog",
"=",
"SK8GyroDialog",
"(",
"self",
".",
"sk8",
".",
"get_imu",
"(",
"self",
".",
"spinIMU",
".",
"value",
"(",
")",
")",
",",
"self",
")",
"if",
"self",
".",
"gyro_dialog",
".",
"exec_",
"(",
")",
"==",
"QDialog",
".",
"Rejected",
":",
"return",
"self",
".",
"calculate_gyro_calibration",
"(",
"self",
".",
"gyro_dialog",
".",
"samples",
")"
] | Perform gyroscope calibration for current IMU. | [
"Perform",
"gyroscope",
"calibration",
"for",
"current",
"IMU",
"."
] | 67347a71762fb421f5ae65a595def5c7879e8b0c | https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/calibration/sk8_calibration_gui.py#L299-L307 |
249,478 | andrewramsay/sk8-drivers | pysk8/calibration/sk8_calibration_gui.py | SK8Calibration.mag_calibration | def mag_calibration(self):
"""Perform magnetometer calibration for current IMU."""
self.calibration_state = self.CAL_MAG
self.mag_dialog = SK8MagDialog(self.sk8.get_imu(self.spinIMU.value()), self)
if self.mag_dialog.exec_() == QDialog.Rejected:
return
self.calculate_mag_calibration(self.mag_dialog.samples) | python | def mag_calibration(self):
"""Perform magnetometer calibration for current IMU."""
self.calibration_state = self.CAL_MAG
self.mag_dialog = SK8MagDialog(self.sk8.get_imu(self.spinIMU.value()), self)
if self.mag_dialog.exec_() == QDialog.Rejected:
return
self.calculate_mag_calibration(self.mag_dialog.samples) | [
"def",
"mag_calibration",
"(",
"self",
")",
":",
"self",
".",
"calibration_state",
"=",
"self",
".",
"CAL_MAG",
"self",
".",
"mag_dialog",
"=",
"SK8MagDialog",
"(",
"self",
".",
"sk8",
".",
"get_imu",
"(",
"self",
".",
"spinIMU",
".",
"value",
"(",
")",
")",
",",
"self",
")",
"if",
"self",
".",
"mag_dialog",
".",
"exec_",
"(",
")",
"==",
"QDialog",
".",
"Rejected",
":",
"return",
"self",
".",
"calculate_mag_calibration",
"(",
"self",
".",
"mag_dialog",
".",
"samples",
")"
] | Perform magnetometer calibration for current IMU. | [
"Perform",
"magnetometer",
"calibration",
"for",
"current",
"IMU",
"."
] | 67347a71762fb421f5ae65a595def5c7879e8b0c | https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/calibration/sk8_calibration_gui.py#L309-L316 |
249,479 | andrewramsay/sk8-drivers | pysk8/calibration/sk8_calibration_gui.py | SK8Calibration.calculate_gyro_calibration | def calculate_gyro_calibration(self, gyro_samples):
"""Performs a basic gyroscope bias calculation.
Takes a list of (x, y, z) samples and averages over each axis to calculate
the bias values, and stores them in the calibration data structure for the
currently connected SK8"""
totals = [0, 0, 0]
for gs in gyro_samples:
totals[0] += gs[0]
totals[1] += gs[1]
totals[2] += gs[2]
for i in range(3):
totals[i] = int(float(totals[i]) / len(gyro_samples))
print('Saving gyro offsets for {}'.format(self.current_imuid))
self.calibration_data[self.current_imuid][self.GYROX_OFFSET] = str(totals[0])
self.calibration_data[self.current_imuid][self.GYROY_OFFSET] = str(totals[1])
self.calibration_data[self.current_imuid][self.GYROZ_OFFSET] = str(totals[2])
self.calibration_data[self.current_imuid][self.GYRO_TIMESTAMP] = datetime.now().isoformat()
self.write_calibration_data()
self.update_data_display(self.calibration_data[self.current_imuid])
self.calibration_state = self.CAL_NONE | python | def calculate_gyro_calibration(self, gyro_samples):
"""Performs a basic gyroscope bias calculation.
Takes a list of (x, y, z) samples and averages over each axis to calculate
the bias values, and stores them in the calibration data structure for the
currently connected SK8"""
totals = [0, 0, 0]
for gs in gyro_samples:
totals[0] += gs[0]
totals[1] += gs[1]
totals[2] += gs[2]
for i in range(3):
totals[i] = int(float(totals[i]) / len(gyro_samples))
print('Saving gyro offsets for {}'.format(self.current_imuid))
self.calibration_data[self.current_imuid][self.GYROX_OFFSET] = str(totals[0])
self.calibration_data[self.current_imuid][self.GYROY_OFFSET] = str(totals[1])
self.calibration_data[self.current_imuid][self.GYROZ_OFFSET] = str(totals[2])
self.calibration_data[self.current_imuid][self.GYRO_TIMESTAMP] = datetime.now().isoformat()
self.write_calibration_data()
self.update_data_display(self.calibration_data[self.current_imuid])
self.calibration_state = self.CAL_NONE | [
"def",
"calculate_gyro_calibration",
"(",
"self",
",",
"gyro_samples",
")",
":",
"totals",
"=",
"[",
"0",
",",
"0",
",",
"0",
"]",
"for",
"gs",
"in",
"gyro_samples",
":",
"totals",
"[",
"0",
"]",
"+=",
"gs",
"[",
"0",
"]",
"totals",
"[",
"1",
"]",
"+=",
"gs",
"[",
"1",
"]",
"totals",
"[",
"2",
"]",
"+=",
"gs",
"[",
"2",
"]",
"for",
"i",
"in",
"range",
"(",
"3",
")",
":",
"totals",
"[",
"i",
"]",
"=",
"int",
"(",
"float",
"(",
"totals",
"[",
"i",
"]",
")",
"/",
"len",
"(",
"gyro_samples",
")",
")",
"print",
"(",
"'Saving gyro offsets for {}'",
".",
"format",
"(",
"self",
".",
"current_imuid",
")",
")",
"self",
".",
"calibration_data",
"[",
"self",
".",
"current_imuid",
"]",
"[",
"self",
".",
"GYROX_OFFSET",
"]",
"=",
"str",
"(",
"totals",
"[",
"0",
"]",
")",
"self",
".",
"calibration_data",
"[",
"self",
".",
"current_imuid",
"]",
"[",
"self",
".",
"GYROY_OFFSET",
"]",
"=",
"str",
"(",
"totals",
"[",
"1",
"]",
")",
"self",
".",
"calibration_data",
"[",
"self",
".",
"current_imuid",
"]",
"[",
"self",
".",
"GYROZ_OFFSET",
"]",
"=",
"str",
"(",
"totals",
"[",
"2",
"]",
")",
"self",
".",
"calibration_data",
"[",
"self",
".",
"current_imuid",
"]",
"[",
"self",
".",
"GYRO_TIMESTAMP",
"]",
"=",
"datetime",
".",
"now",
"(",
")",
".",
"isoformat",
"(",
")",
"self",
".",
"write_calibration_data",
"(",
")",
"self",
".",
"update_data_display",
"(",
"self",
".",
"calibration_data",
"[",
"self",
".",
"current_imuid",
"]",
")",
"self",
".",
"calibration_state",
"=",
"self",
".",
"CAL_NONE"
] | Performs a basic gyroscope bias calculation.
Takes a list of (x, y, z) samples and averages over each axis to calculate
the bias values, and stores them in the calibration data structure for the
currently connected SK8 | [
"Performs",
"a",
"basic",
"gyroscope",
"bias",
"calculation",
"."
] | 67347a71762fb421f5ae65a595def5c7879e8b0c | https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/calibration/sk8_calibration_gui.py#L357-L380 |
249,480 | andrewramsay/sk8-drivers | pysk8/calibration/sk8_calibration_gui.py | SK8Calibration.device_selected | def device_selected(self, index):
"""Handler for selecting a device from the list in the UI"""
device = self.devicelist_model.itemFromIndex(index)
print(device.device.addr)
self.btnConnect.setEnabled(True) | python | def device_selected(self, index):
"""Handler for selecting a device from the list in the UI"""
device = self.devicelist_model.itemFromIndex(index)
print(device.device.addr)
self.btnConnect.setEnabled(True) | [
"def",
"device_selected",
"(",
"self",
",",
"index",
")",
":",
"device",
"=",
"self",
".",
"devicelist_model",
".",
"itemFromIndex",
"(",
"index",
")",
"print",
"(",
"device",
".",
"device",
".",
"addr",
")",
"self",
".",
"btnConnect",
".",
"setEnabled",
"(",
"True",
")"
] | Handler for selecting a device from the list in the UI | [
"Handler",
"for",
"selecting",
"a",
"device",
"from",
"the",
"list",
"in",
"the",
"UI"
] | 67347a71762fb421f5ae65a595def5c7879e8b0c | https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/calibration/sk8_calibration_gui.py#L475-L479 |
249,481 | xtrementl/focus | focus/plugin/modules/sites.py | SiteBlock._handle_block | def _handle_block(self, task, disable=False):
""" Handles blocking domains using hosts file.
`task`
``Task`` instance.
`disable`
Set to ``True``, to turn off blocking and restore hosts file;
otherwise, ``False`` will enable blocking by updating hosts
file.
Returns boolean.
"""
backup_file = os.path.join(task.task_dir, '.hosts.bak')
self.orig_data = self.orig_data or common.readfile(backup_file)
self.last_updated = self.last_updated or -1
if not self.orig_data:
# should't attempt restore without good original data, bail
if disable:
return False
# attempt to fetch data from the source
self.orig_data = common.readfile(self.hosts_file)
if not self.orig_data:
return False
# restore backup
if not os.path.exists(backup_file):
common.writefile(backup_file, self.orig_data)
# bail early if hosts file modification time hasn't changed
try:
should_write = (disable or self.last_updated
!= os.path.getmtime(self.hosts_file))
except OSError:
should_write = True # file was removed, let's write!
if not should_write:
return True
# make copy of original data, in case we need to modify
data = self.orig_data
# if not restoring, tack on domains mapped
# to localhost to end of file data
if not disable:
# convert the set to a list and sort
domains = list(self.domains)
domains.sort()
data += ('\n'.join('127.0.0.1\t{0}\t# FOCUS'
.format(d) for d in domains) + '\n')
# make temp file with new host file data
with tempfile.NamedTemporaryFile(prefix='focus_') as tempf:
tempf.write(data)
tempf.flush()
# overwrite hosts file with our modified copy.
if not self.run_root('cp "{0}" "{1}"'.format(tempf.name,
self.hosts_file)):
return False
# MacOS X generally requires flushing the system dns cache to pick
# up changes to the hosts file:
# dscacheutil -flushcache or lookupd -flushcache
if common.IS_MACOSX:
dscacheutil, lookupd = [common.which(x) for x in
('dscacheutil', 'lookupd')]
self.run_root(' '.join([dscacheutil or lookupd,
'-flushcache']))
if disable:
common.safe_remove_file(backup_file) # cleanup the backup
# store last modification time
try:
self.last_updated = os.path.getmtime(self.hosts_file)
except OSError:
# file was removed, let's update next time around
self.last_updated = -1
return True | python | def _handle_block(self, task, disable=False):
""" Handles blocking domains using hosts file.
`task`
``Task`` instance.
`disable`
Set to ``True``, to turn off blocking and restore hosts file;
otherwise, ``False`` will enable blocking by updating hosts
file.
Returns boolean.
"""
backup_file = os.path.join(task.task_dir, '.hosts.bak')
self.orig_data = self.orig_data or common.readfile(backup_file)
self.last_updated = self.last_updated or -1
if not self.orig_data:
# should't attempt restore without good original data, bail
if disable:
return False
# attempt to fetch data from the source
self.orig_data = common.readfile(self.hosts_file)
if not self.orig_data:
return False
# restore backup
if not os.path.exists(backup_file):
common.writefile(backup_file, self.orig_data)
# bail early if hosts file modification time hasn't changed
try:
should_write = (disable or self.last_updated
!= os.path.getmtime(self.hosts_file))
except OSError:
should_write = True # file was removed, let's write!
if not should_write:
return True
# make copy of original data, in case we need to modify
data = self.orig_data
# if not restoring, tack on domains mapped
# to localhost to end of file data
if not disable:
# convert the set to a list and sort
domains = list(self.domains)
domains.sort()
data += ('\n'.join('127.0.0.1\t{0}\t# FOCUS'
.format(d) for d in domains) + '\n')
# make temp file with new host file data
with tempfile.NamedTemporaryFile(prefix='focus_') as tempf:
tempf.write(data)
tempf.flush()
# overwrite hosts file with our modified copy.
if not self.run_root('cp "{0}" "{1}"'.format(tempf.name,
self.hosts_file)):
return False
# MacOS X generally requires flushing the system dns cache to pick
# up changes to the hosts file:
# dscacheutil -flushcache or lookupd -flushcache
if common.IS_MACOSX:
dscacheutil, lookupd = [common.which(x) for x in
('dscacheutil', 'lookupd')]
self.run_root(' '.join([dscacheutil or lookupd,
'-flushcache']))
if disable:
common.safe_remove_file(backup_file) # cleanup the backup
# store last modification time
try:
self.last_updated = os.path.getmtime(self.hosts_file)
except OSError:
# file was removed, let's update next time around
self.last_updated = -1
return True | [
"def",
"_handle_block",
"(",
"self",
",",
"task",
",",
"disable",
"=",
"False",
")",
":",
"backup_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"task",
".",
"task_dir",
",",
"'.hosts.bak'",
")",
"self",
".",
"orig_data",
"=",
"self",
".",
"orig_data",
"or",
"common",
".",
"readfile",
"(",
"backup_file",
")",
"self",
".",
"last_updated",
"=",
"self",
".",
"last_updated",
"or",
"-",
"1",
"if",
"not",
"self",
".",
"orig_data",
":",
"# should't attempt restore without good original data, bail",
"if",
"disable",
":",
"return",
"False",
"# attempt to fetch data from the source",
"self",
".",
"orig_data",
"=",
"common",
".",
"readfile",
"(",
"self",
".",
"hosts_file",
")",
"if",
"not",
"self",
".",
"orig_data",
":",
"return",
"False",
"# restore backup",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"backup_file",
")",
":",
"common",
".",
"writefile",
"(",
"backup_file",
",",
"self",
".",
"orig_data",
")",
"# bail early if hosts file modification time hasn't changed",
"try",
":",
"should_write",
"=",
"(",
"disable",
"or",
"self",
".",
"last_updated",
"!=",
"os",
".",
"path",
".",
"getmtime",
"(",
"self",
".",
"hosts_file",
")",
")",
"except",
"OSError",
":",
"should_write",
"=",
"True",
"# file was removed, let's write!",
"if",
"not",
"should_write",
":",
"return",
"True",
"# make copy of original data, in case we need to modify",
"data",
"=",
"self",
".",
"orig_data",
"# if not restoring, tack on domains mapped",
"# to localhost to end of file data",
"if",
"not",
"disable",
":",
"# convert the set to a list and sort",
"domains",
"=",
"list",
"(",
"self",
".",
"domains",
")",
"domains",
".",
"sort",
"(",
")",
"data",
"+=",
"(",
"'\\n'",
".",
"join",
"(",
"'127.0.0.1\\t{0}\\t# FOCUS'",
".",
"format",
"(",
"d",
")",
"for",
"d",
"in",
"domains",
")",
"+",
"'\\n'",
")",
"# make temp file with new host file data",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
"prefix",
"=",
"'focus_'",
")",
"as",
"tempf",
":",
"tempf",
".",
"write",
"(",
"data",
")",
"tempf",
".",
"flush",
"(",
")",
"# overwrite hosts file with our modified copy.",
"if",
"not",
"self",
".",
"run_root",
"(",
"'cp \"{0}\" \"{1}\"'",
".",
"format",
"(",
"tempf",
".",
"name",
",",
"self",
".",
"hosts_file",
")",
")",
":",
"return",
"False",
"# MacOS X generally requires flushing the system dns cache to pick",
"# up changes to the hosts file:",
"# dscacheutil -flushcache or lookupd -flushcache",
"if",
"common",
".",
"IS_MACOSX",
":",
"dscacheutil",
",",
"lookupd",
"=",
"[",
"common",
".",
"which",
"(",
"x",
")",
"for",
"x",
"in",
"(",
"'dscacheutil'",
",",
"'lookupd'",
")",
"]",
"self",
".",
"run_root",
"(",
"' '",
".",
"join",
"(",
"[",
"dscacheutil",
"or",
"lookupd",
",",
"'-flushcache'",
"]",
")",
")",
"if",
"disable",
":",
"common",
".",
"safe_remove_file",
"(",
"backup_file",
")",
"# cleanup the backup",
"# store last modification time",
"try",
":",
"self",
".",
"last_updated",
"=",
"os",
".",
"path",
".",
"getmtime",
"(",
"self",
".",
"hosts_file",
")",
"except",
"OSError",
":",
"# file was removed, let's update next time around",
"self",
".",
"last_updated",
"=",
"-",
"1",
"return",
"True"
] | Handles blocking domains using hosts file.
`task`
``Task`` instance.
`disable`
Set to ``True``, to turn off blocking and restore hosts file;
otherwise, ``False`` will enable blocking by updating hosts
file.
Returns boolean. | [
"Handles",
"blocking",
"domains",
"using",
"hosts",
"file",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/sites.py#L51-L137 |
249,482 | xtrementl/focus | focus/plugin/modules/sites.py | SiteBlock.parse_option | def parse_option(self, option, block_name, *values):
""" Parse domain values for option.
"""
_extra_subs = ('www', 'm', 'mobile')
if len(values) == 0: # expect some values here..
raise ValueError
for value in values:
value = value.lower()
# if it doesn't look like a protocol, assume http
# (e.g. only domain supplied)
if not _RE_PROTOCOL.match(value):
value = 'http://' + value
# did it parse? pull hostname/domain
parsed = urlparse.urlparse(value)
if parsed:
domain = parsed.hostname
if domain and _RE_TLD.search(domain): # must have a TLD
# doesn't have subdomain, tack on www, m, and mobile
# for good measure. note, this check fails for
# multi-part TLDs, e.g. .co.uk
domain = _RE_WWW_SUB.sub('', domain) # strip "www."
if len(domain.split('.')) == 2:
for sub in _extra_subs:
self.domains.add('{0}.{1}'.format(sub, domain))
self.domains.add(domain)
# no domains.. must have failed
if not self.domains:
raise ValueError | python | def parse_option(self, option, block_name, *values):
""" Parse domain values for option.
"""
_extra_subs = ('www', 'm', 'mobile')
if len(values) == 0: # expect some values here..
raise ValueError
for value in values:
value = value.lower()
# if it doesn't look like a protocol, assume http
# (e.g. only domain supplied)
if not _RE_PROTOCOL.match(value):
value = 'http://' + value
# did it parse? pull hostname/domain
parsed = urlparse.urlparse(value)
if parsed:
domain = parsed.hostname
if domain and _RE_TLD.search(domain): # must have a TLD
# doesn't have subdomain, tack on www, m, and mobile
# for good measure. note, this check fails for
# multi-part TLDs, e.g. .co.uk
domain = _RE_WWW_SUB.sub('', domain) # strip "www."
if len(domain.split('.')) == 2:
for sub in _extra_subs:
self.domains.add('{0}.{1}'.format(sub, domain))
self.domains.add(domain)
# no domains.. must have failed
if not self.domains:
raise ValueError | [
"def",
"parse_option",
"(",
"self",
",",
"option",
",",
"block_name",
",",
"*",
"values",
")",
":",
"_extra_subs",
"=",
"(",
"'www'",
",",
"'m'",
",",
"'mobile'",
")",
"if",
"len",
"(",
"values",
")",
"==",
"0",
":",
"# expect some values here..",
"raise",
"ValueError",
"for",
"value",
"in",
"values",
":",
"value",
"=",
"value",
".",
"lower",
"(",
")",
"# if it doesn't look like a protocol, assume http",
"# (e.g. only domain supplied)",
"if",
"not",
"_RE_PROTOCOL",
".",
"match",
"(",
"value",
")",
":",
"value",
"=",
"'http://'",
"+",
"value",
"# did it parse? pull hostname/domain",
"parsed",
"=",
"urlparse",
".",
"urlparse",
"(",
"value",
")",
"if",
"parsed",
":",
"domain",
"=",
"parsed",
".",
"hostname",
"if",
"domain",
"and",
"_RE_TLD",
".",
"search",
"(",
"domain",
")",
":",
"# must have a TLD",
"# doesn't have subdomain, tack on www, m, and mobile",
"# for good measure. note, this check fails for",
"# multi-part TLDs, e.g. .co.uk",
"domain",
"=",
"_RE_WWW_SUB",
".",
"sub",
"(",
"''",
",",
"domain",
")",
"# strip \"www.\"",
"if",
"len",
"(",
"domain",
".",
"split",
"(",
"'.'",
")",
")",
"==",
"2",
":",
"for",
"sub",
"in",
"_extra_subs",
":",
"self",
".",
"domains",
".",
"add",
"(",
"'{0}.{1}'",
".",
"format",
"(",
"sub",
",",
"domain",
")",
")",
"self",
".",
"domains",
".",
"add",
"(",
"domain",
")",
"# no domains.. must have failed",
"if",
"not",
"self",
".",
"domains",
":",
"raise",
"ValueError"
] | Parse domain values for option. | [
"Parse",
"domain",
"values",
"for",
"option",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/sites.py#L139-L174 |
249,483 | tomokinakamaru/mapletree | mapletree/helpers/signing.py | Signing.sign | def sign(self, data):
""" Create url-safe signed token.
:param data: Data to sign
:type data: object
"""
try:
jsonstr = json.dumps(data, separators=(',', ':'))
except TypeError as e:
raise DataSignError(e.args[0])
else:
signature = self._create_signature(jsonstr)
return self._b64encode(jsonstr + '.' + signature) | python | def sign(self, data):
""" Create url-safe signed token.
:param data: Data to sign
:type data: object
"""
try:
jsonstr = json.dumps(data, separators=(',', ':'))
except TypeError as e:
raise DataSignError(e.args[0])
else:
signature = self._create_signature(jsonstr)
return self._b64encode(jsonstr + '.' + signature) | [
"def",
"sign",
"(",
"self",
",",
"data",
")",
":",
"try",
":",
"jsonstr",
"=",
"json",
".",
"dumps",
"(",
"data",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
")",
"except",
"TypeError",
"as",
"e",
":",
"raise",
"DataSignError",
"(",
"e",
".",
"args",
"[",
"0",
"]",
")",
"else",
":",
"signature",
"=",
"self",
".",
"_create_signature",
"(",
"jsonstr",
")",
"return",
"self",
".",
"_b64encode",
"(",
"jsonstr",
"+",
"'.'",
"+",
"signature",
")"
] | Create url-safe signed token.
:param data: Data to sign
:type data: object | [
"Create",
"url",
"-",
"safe",
"signed",
"token",
"."
] | 19ec68769ef2c1cd2e4164ed8623e0c4280279bb | https://github.com/tomokinakamaru/mapletree/blob/19ec68769ef2c1cd2e4164ed8623e0c4280279bb/mapletree/helpers/signing.py#L22-L36 |
249,484 | tomokinakamaru/mapletree | mapletree/helpers/signing.py | Signing.unsign | def unsign(self, b64msg):
""" Retrieves data from signed token.
:param b64msg: Token to unsign
:type b64msg: str
"""
msg = self._b64decode(b64msg)
try:
body, signature = msg.rsplit('.', 1)
except ValueError as e:
raise MalformedSigendMessage(e.args[0])
else:
if signature == self._create_signature(body):
try:
return json.loads(body)
except ValueError as e:
raise MalformedSigendMessage(e.args[0])
else:
raise BadSignature() | python | def unsign(self, b64msg):
""" Retrieves data from signed token.
:param b64msg: Token to unsign
:type b64msg: str
"""
msg = self._b64decode(b64msg)
try:
body, signature = msg.rsplit('.', 1)
except ValueError as e:
raise MalformedSigendMessage(e.args[0])
else:
if signature == self._create_signature(body):
try:
return json.loads(body)
except ValueError as e:
raise MalformedSigendMessage(e.args[0])
else:
raise BadSignature() | [
"def",
"unsign",
"(",
"self",
",",
"b64msg",
")",
":",
"msg",
"=",
"self",
".",
"_b64decode",
"(",
"b64msg",
")",
"try",
":",
"body",
",",
"signature",
"=",
"msg",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"MalformedSigendMessage",
"(",
"e",
".",
"args",
"[",
"0",
"]",
")",
"else",
":",
"if",
"signature",
"==",
"self",
".",
"_create_signature",
"(",
"body",
")",
":",
"try",
":",
"return",
"json",
".",
"loads",
"(",
"body",
")",
"except",
"ValueError",
"as",
"e",
":",
"raise",
"MalformedSigendMessage",
"(",
"e",
".",
"args",
"[",
"0",
"]",
")",
"else",
":",
"raise",
"BadSignature",
"(",
")"
] | Retrieves data from signed token.
:param b64msg: Token to unsign
:type b64msg: str | [
"Retrieves",
"data",
"from",
"signed",
"token",
"."
] | 19ec68769ef2c1cd2e4164ed8623e0c4280279bb | https://github.com/tomokinakamaru/mapletree/blob/19ec68769ef2c1cd2e4164ed8623e0c4280279bb/mapletree/helpers/signing.py#L38-L60 |
249,485 | fred49/linshare-api | linshareapi/core.py | extract_file_name | def extract_file_name(content_dispo):
"""Extract file name from the input request body"""
# print type(content_dispo)
# print repr(content_dispo)
# convertion of escape string (str type) from server
# to unicode object
content_dispo = content_dispo.decode('unicode-escape').strip('"')
file_name = ""
for key_val in content_dispo.split(';'):
param = key_val.strip().split('=')
if param[0] == "filename":
file_name = param[1].strip('"')
break
return file_name | python | def extract_file_name(content_dispo):
"""Extract file name from the input request body"""
# print type(content_dispo)
# print repr(content_dispo)
# convertion of escape string (str type) from server
# to unicode object
content_dispo = content_dispo.decode('unicode-escape').strip('"')
file_name = ""
for key_val in content_dispo.split(';'):
param = key_val.strip().split('=')
if param[0] == "filename":
file_name = param[1].strip('"')
break
return file_name | [
"def",
"extract_file_name",
"(",
"content_dispo",
")",
":",
"# print type(content_dispo)",
"# print repr(content_dispo)",
"# convertion of escape string (str type) from server",
"# to unicode object",
"content_dispo",
"=",
"content_dispo",
".",
"decode",
"(",
"'unicode-escape'",
")",
".",
"strip",
"(",
"'\"'",
")",
"file_name",
"=",
"\"\"",
"for",
"key_val",
"in",
"content_dispo",
".",
"split",
"(",
"';'",
")",
":",
"param",
"=",
"key_val",
".",
"strip",
"(",
")",
".",
"split",
"(",
"'='",
")",
"if",
"param",
"[",
"0",
"]",
"==",
"\"filename\"",
":",
"file_name",
"=",
"param",
"[",
"1",
"]",
".",
"strip",
"(",
"'\"'",
")",
"break",
"return",
"file_name"
] | Extract file name from the input request body | [
"Extract",
"file",
"name",
"from",
"the",
"input",
"request",
"body"
] | be646c25aa8ba3718abb6869c620b157d53d6e41 | https://github.com/fred49/linshare-api/blob/be646c25aa8ba3718abb6869c620b157d53d6e41/linshareapi/core.py#L59-L72 |
249,486 | fred49/linshare-api | linshareapi/core.py | CoreCli.download | def download(self, uuid, url, forced_file_name=None,
progress_bar=True, chunk_size=256,
directory=None, overwrite=False):
""" download a file from LinShare using its rest api.
This method could throw exceptions like urllib2.HTTPError."""
self.last_req_time = None
url = self.get_full_url(url)
self.log.debug("download url : " + url)
# Building request
request = urllib2.Request(url)
# request.add_header('Content-Type', 'application/json; charset=UTF-8')
request.add_header('Accept', 'application/json,*/*;charset=UTF-8')
# request start
starttime = datetime.datetime.now()
# doRequest
resultq = urllib2.urlopen(request)
code = resultq.getcode()
file_name = uuid
self.log.debug("ret code : '" + str(code) + "'")
if code == 200:
content_lenth = resultq.info().getheader('Content-Length')
if not content_lenth:
msg = "No content lengh header found !"
self.log.debug(msg)
progress_bar = False
else:
file_size = int(content_lenth.strip())
if forced_file_name:
file_name = forced_file_name
else:
content_dispo = resultq.info().getheader('Content-disposition')
if content_dispo:
content_dispo = content_dispo.strip()
file_name = extract_file_name(content_dispo)
if directory:
if os.path.isdir(directory):
file_name = directory + "/" + file_name
if os.path.isfile(file_name):
if not overwrite:
cpt = 1
while 1:
if not os.path.isfile(file_name + "." + str(cpt)):
file_name += "." + str(cpt)
break
cpt += 1
else:
self.log.warn("'%s' already exists. It was overwriten.",
file_name)
stream = None
pbar = None
if progress_bar:
widgets = [FileTransferSpeed(), ' <<<', Bar(), '>>> ',
Percentage(), ' ', ETA()]
pbar = ProgressBar(widgets=widgets, maxval=file_size)
stream = FileWithCallback(file_name, 'w', pbar.update,
file_size, file_name)
pbar.start()
else:
stream = file(file_name, 'w')
while 1:
chunk = resultq.read(chunk_size)
if not chunk:
break
stream.write(chunk)
stream.flush()
stream.close()
if pbar:
pbar.finish()
# request end
endtime = datetime.datetime.now()
self.last_req_time = str(endtime - starttime)
self.log.debug("download url : %(url)s : request time : %(time)s",
{"url": url,
"time": self.last_req_time})
return (file_name, self.last_req_time) | python | def download(self, uuid, url, forced_file_name=None,
progress_bar=True, chunk_size=256,
directory=None, overwrite=False):
""" download a file from LinShare using its rest api.
This method could throw exceptions like urllib2.HTTPError."""
self.last_req_time = None
url = self.get_full_url(url)
self.log.debug("download url : " + url)
# Building request
request = urllib2.Request(url)
# request.add_header('Content-Type', 'application/json; charset=UTF-8')
request.add_header('Accept', 'application/json,*/*;charset=UTF-8')
# request start
starttime = datetime.datetime.now()
# doRequest
resultq = urllib2.urlopen(request)
code = resultq.getcode()
file_name = uuid
self.log.debug("ret code : '" + str(code) + "'")
if code == 200:
content_lenth = resultq.info().getheader('Content-Length')
if not content_lenth:
msg = "No content lengh header found !"
self.log.debug(msg)
progress_bar = False
else:
file_size = int(content_lenth.strip())
if forced_file_name:
file_name = forced_file_name
else:
content_dispo = resultq.info().getheader('Content-disposition')
if content_dispo:
content_dispo = content_dispo.strip()
file_name = extract_file_name(content_dispo)
if directory:
if os.path.isdir(directory):
file_name = directory + "/" + file_name
if os.path.isfile(file_name):
if not overwrite:
cpt = 1
while 1:
if not os.path.isfile(file_name + "." + str(cpt)):
file_name += "." + str(cpt)
break
cpt += 1
else:
self.log.warn("'%s' already exists. It was overwriten.",
file_name)
stream = None
pbar = None
if progress_bar:
widgets = [FileTransferSpeed(), ' <<<', Bar(), '>>> ',
Percentage(), ' ', ETA()]
pbar = ProgressBar(widgets=widgets, maxval=file_size)
stream = FileWithCallback(file_name, 'w', pbar.update,
file_size, file_name)
pbar.start()
else:
stream = file(file_name, 'w')
while 1:
chunk = resultq.read(chunk_size)
if not chunk:
break
stream.write(chunk)
stream.flush()
stream.close()
if pbar:
pbar.finish()
# request end
endtime = datetime.datetime.now()
self.last_req_time = str(endtime - starttime)
self.log.debug("download url : %(url)s : request time : %(time)s",
{"url": url,
"time": self.last_req_time})
return (file_name, self.last_req_time) | [
"def",
"download",
"(",
"self",
",",
"uuid",
",",
"url",
",",
"forced_file_name",
"=",
"None",
",",
"progress_bar",
"=",
"True",
",",
"chunk_size",
"=",
"256",
",",
"directory",
"=",
"None",
",",
"overwrite",
"=",
"False",
")",
":",
"self",
".",
"last_req_time",
"=",
"None",
"url",
"=",
"self",
".",
"get_full_url",
"(",
"url",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"download url : \"",
"+",
"url",
")",
"# Building request",
"request",
"=",
"urllib2",
".",
"Request",
"(",
"url",
")",
"# request.add_header('Content-Type', 'application/json; charset=UTF-8')",
"request",
".",
"add_header",
"(",
"'Accept'",
",",
"'application/json,*/*;charset=UTF-8'",
")",
"# request start",
"starttime",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"# doRequest",
"resultq",
"=",
"urllib2",
".",
"urlopen",
"(",
"request",
")",
"code",
"=",
"resultq",
".",
"getcode",
"(",
")",
"file_name",
"=",
"uuid",
"self",
".",
"log",
".",
"debug",
"(",
"\"ret code : '\"",
"+",
"str",
"(",
"code",
")",
"+",
"\"'\"",
")",
"if",
"code",
"==",
"200",
":",
"content_lenth",
"=",
"resultq",
".",
"info",
"(",
")",
".",
"getheader",
"(",
"'Content-Length'",
")",
"if",
"not",
"content_lenth",
":",
"msg",
"=",
"\"No content lengh header found !\"",
"self",
".",
"log",
".",
"debug",
"(",
"msg",
")",
"progress_bar",
"=",
"False",
"else",
":",
"file_size",
"=",
"int",
"(",
"content_lenth",
".",
"strip",
"(",
")",
")",
"if",
"forced_file_name",
":",
"file_name",
"=",
"forced_file_name",
"else",
":",
"content_dispo",
"=",
"resultq",
".",
"info",
"(",
")",
".",
"getheader",
"(",
"'Content-disposition'",
")",
"if",
"content_dispo",
":",
"content_dispo",
"=",
"content_dispo",
".",
"strip",
"(",
")",
"file_name",
"=",
"extract_file_name",
"(",
"content_dispo",
")",
"if",
"directory",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"directory",
")",
":",
"file_name",
"=",
"directory",
"+",
"\"/\"",
"+",
"file_name",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"file_name",
")",
":",
"if",
"not",
"overwrite",
":",
"cpt",
"=",
"1",
"while",
"1",
":",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"file_name",
"+",
"\".\"",
"+",
"str",
"(",
"cpt",
")",
")",
":",
"file_name",
"+=",
"\".\"",
"+",
"str",
"(",
"cpt",
")",
"break",
"cpt",
"+=",
"1",
"else",
":",
"self",
".",
"log",
".",
"warn",
"(",
"\"'%s' already exists. It was overwriten.\"",
",",
"file_name",
")",
"stream",
"=",
"None",
"pbar",
"=",
"None",
"if",
"progress_bar",
":",
"widgets",
"=",
"[",
"FileTransferSpeed",
"(",
")",
",",
"' <<<'",
",",
"Bar",
"(",
")",
",",
"'>>> '",
",",
"Percentage",
"(",
")",
",",
"' '",
",",
"ETA",
"(",
")",
"]",
"pbar",
"=",
"ProgressBar",
"(",
"widgets",
"=",
"widgets",
",",
"maxval",
"=",
"file_size",
")",
"stream",
"=",
"FileWithCallback",
"(",
"file_name",
",",
"'w'",
",",
"pbar",
".",
"update",
",",
"file_size",
",",
"file_name",
")",
"pbar",
".",
"start",
"(",
")",
"else",
":",
"stream",
"=",
"file",
"(",
"file_name",
",",
"'w'",
")",
"while",
"1",
":",
"chunk",
"=",
"resultq",
".",
"read",
"(",
"chunk_size",
")",
"if",
"not",
"chunk",
":",
"break",
"stream",
".",
"write",
"(",
"chunk",
")",
"stream",
".",
"flush",
"(",
")",
"stream",
".",
"close",
"(",
")",
"if",
"pbar",
":",
"pbar",
".",
"finish",
"(",
")",
"# request end",
"endtime",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"self",
".",
"last_req_time",
"=",
"str",
"(",
"endtime",
"-",
"starttime",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"download url : %(url)s : request time : %(time)s\"",
",",
"{",
"\"url\"",
":",
"url",
",",
"\"time\"",
":",
"self",
".",
"last_req_time",
"}",
")",
"return",
"(",
"file_name",
",",
"self",
".",
"last_req_time",
")"
] | download a file from LinShare using its rest api.
This method could throw exceptions like urllib2.HTTPError. | [
"download",
"a",
"file",
"from",
"LinShare",
"using",
"its",
"rest",
"api",
".",
"This",
"method",
"could",
"throw",
"exceptions",
"like",
"urllib2",
".",
"HTTPError",
"."
] | be646c25aa8ba3718abb6869c620b157d53d6e41 | https://github.com/fred49/linshare-api/blob/be646c25aa8ba3718abb6869c620b157d53d6e41/linshareapi/core.py#L558-L632 |
249,487 | fred49/linshare-api | linshareapi/core.py | ResourceBuilder.add_field | def add_field(self, field, arg=None, value=None, extended=False,
hidden=False, e_type=str, required=None):
"""Add a new field to the current ResourceBuilder.
Keyword arguments:
field -- field name
arg -- name of the attribute name in arg object (argparse)
value -- a default for this field, used for resource creation.
extended -- If set to true, the current field will be display in
extended list mode only.
hidden -- If set to true, the current field won't be exposed
as available keys.
e_type -- field data type (default str): int, float, str
required -- True if the current field is required for create
and update methods
"""
if required is None:
required = self._required
if arg is None:
arg = re.sub('(?!^)([A-Z]+)', r'_\1', field).lower()
self._fields[field] = {
'field': field,
'arg': arg,
'value': value,
'extended': extended,
'required': required,
'e_type': e_type,
'hidden': hidden
} | python | def add_field(self, field, arg=None, value=None, extended=False,
hidden=False, e_type=str, required=None):
"""Add a new field to the current ResourceBuilder.
Keyword arguments:
field -- field name
arg -- name of the attribute name in arg object (argparse)
value -- a default for this field, used for resource creation.
extended -- If set to true, the current field will be display in
extended list mode only.
hidden -- If set to true, the current field won't be exposed
as available keys.
e_type -- field data type (default str): int, float, str
required -- True if the current field is required for create
and update methods
"""
if required is None:
required = self._required
if arg is None:
arg = re.sub('(?!^)([A-Z]+)', r'_\1', field).lower()
self._fields[field] = {
'field': field,
'arg': arg,
'value': value,
'extended': extended,
'required': required,
'e_type': e_type,
'hidden': hidden
} | [
"def",
"add_field",
"(",
"self",
",",
"field",
",",
"arg",
"=",
"None",
",",
"value",
"=",
"None",
",",
"extended",
"=",
"False",
",",
"hidden",
"=",
"False",
",",
"e_type",
"=",
"str",
",",
"required",
"=",
"None",
")",
":",
"if",
"required",
"is",
"None",
":",
"required",
"=",
"self",
".",
"_required",
"if",
"arg",
"is",
"None",
":",
"arg",
"=",
"re",
".",
"sub",
"(",
"'(?!^)([A-Z]+)'",
",",
"r'_\\1'",
",",
"field",
")",
".",
"lower",
"(",
")",
"self",
".",
"_fields",
"[",
"field",
"]",
"=",
"{",
"'field'",
":",
"field",
",",
"'arg'",
":",
"arg",
",",
"'value'",
":",
"value",
",",
"'extended'",
":",
"extended",
",",
"'required'",
":",
"required",
",",
"'e_type'",
":",
"e_type",
",",
"'hidden'",
":",
"hidden",
"}"
] | Add a new field to the current ResourceBuilder.
Keyword arguments:
field -- field name
arg -- name of the attribute name in arg object (argparse)
value -- a default for this field, used for resource creation.
extended -- If set to true, the current field will be display in
extended list mode only.
hidden -- If set to true, the current field won't be exposed
as available keys.
e_type -- field data type (default str): int, float, str
required -- True if the current field is required for create
and update methods | [
"Add",
"a",
"new",
"field",
"to",
"the",
"current",
"ResourceBuilder",
"."
] | be646c25aa8ba3718abb6869c620b157d53d6e41 | https://github.com/fred49/linshare-api/blob/be646c25aa8ba3718abb6869c620b157d53d6e41/linshareapi/core.py#L658-L686 |
249,488 | laysakura/relshell | relshell/daemon_shelloperator.py | DaemonShellOperator.kill | def kill(self):
"""Kill instantiated process
:raises: `AttributeError` if instantiated process doesn't seem to satisfy `constraints <relshell.daemon_shelloperator.DaemonShellOperator>`_
"""
BaseShellOperator._close_process_input_stdin(self._batcmd.batch_to_file_s)
BaseShellOperator._wait_process(self._process, self._batcmd.sh_cmd, self._success_exitcodes)
BaseShellOperator._rm_process_input_tmpfiles(self._batcmd.batch_to_file_s)
self._process = None | python | def kill(self):
"""Kill instantiated process
:raises: `AttributeError` if instantiated process doesn't seem to satisfy `constraints <relshell.daemon_shelloperator.DaemonShellOperator>`_
"""
BaseShellOperator._close_process_input_stdin(self._batcmd.batch_to_file_s)
BaseShellOperator._wait_process(self._process, self._batcmd.sh_cmd, self._success_exitcodes)
BaseShellOperator._rm_process_input_tmpfiles(self._batcmd.batch_to_file_s)
self._process = None | [
"def",
"kill",
"(",
"self",
")",
":",
"BaseShellOperator",
".",
"_close_process_input_stdin",
"(",
"self",
".",
"_batcmd",
".",
"batch_to_file_s",
")",
"BaseShellOperator",
".",
"_wait_process",
"(",
"self",
".",
"_process",
",",
"self",
".",
"_batcmd",
".",
"sh_cmd",
",",
"self",
".",
"_success_exitcodes",
")",
"BaseShellOperator",
".",
"_rm_process_input_tmpfiles",
"(",
"self",
".",
"_batcmd",
".",
"batch_to_file_s",
")",
"self",
".",
"_process",
"=",
"None"
] | Kill instantiated process
:raises: `AttributeError` if instantiated process doesn't seem to satisfy `constraints <relshell.daemon_shelloperator.DaemonShellOperator>`_ | [
"Kill",
"instantiated",
"process"
] | 9ca5c03a34c11cb763a4a75595f18bf4383aa8cc | https://github.com/laysakura/relshell/blob/9ca5c03a34c11cb763a4a75595f18bf4383aa8cc/relshell/daemon_shelloperator.py#L120-L128 |
249,489 | simpleenergy/env-excavator | excavator/utils.py | env_timestamp | def env_timestamp(name, required=False, default=empty):
"""Pulls an environment variable out of the environment and parses it to a
``datetime.datetime`` object. The environment variable is expected to be a
timestamp in the form of a float.
If the name is not present in the environment and no default is specified
then a ``ValueError`` will be raised.
:param name: The name of the environment variable be pulled
:type name: str
:param required: Whether the environment variable is required. If ``True``
and the variable is not present, a ``KeyError`` is raised.
:type required: bool
:param default: The value to return if the environment variable is not
present. (Providing a default alongside setting ``required=True`` will raise
a ``ValueError``)
:type default: bool
"""
if required and default is not empty:
raise ValueError("Using `default` with `required=True` is invalid")
value = get_env_value(name, required=required, default=empty)
# change datetime.datetime to time, return time.struct_time type
if default is not empty and value is empty:
return default
if value is empty:
raise ValueError(
"`env_timestamp` requires either a default value to be specified, "
"or for the variable to be present in the environment"
)
timestamp = float(value)
return datetime.datetime.fromtimestamp(timestamp) | python | def env_timestamp(name, required=False, default=empty):
"""Pulls an environment variable out of the environment and parses it to a
``datetime.datetime`` object. The environment variable is expected to be a
timestamp in the form of a float.
If the name is not present in the environment and no default is specified
then a ``ValueError`` will be raised.
:param name: The name of the environment variable be pulled
:type name: str
:param required: Whether the environment variable is required. If ``True``
and the variable is not present, a ``KeyError`` is raised.
:type required: bool
:param default: The value to return if the environment variable is not
present. (Providing a default alongside setting ``required=True`` will raise
a ``ValueError``)
:type default: bool
"""
if required and default is not empty:
raise ValueError("Using `default` with `required=True` is invalid")
value = get_env_value(name, required=required, default=empty)
# change datetime.datetime to time, return time.struct_time type
if default is not empty and value is empty:
return default
if value is empty:
raise ValueError(
"`env_timestamp` requires either a default value to be specified, "
"or for the variable to be present in the environment"
)
timestamp = float(value)
return datetime.datetime.fromtimestamp(timestamp) | [
"def",
"env_timestamp",
"(",
"name",
",",
"required",
"=",
"False",
",",
"default",
"=",
"empty",
")",
":",
"if",
"required",
"and",
"default",
"is",
"not",
"empty",
":",
"raise",
"ValueError",
"(",
"\"Using `default` with `required=True` is invalid\"",
")",
"value",
"=",
"get_env_value",
"(",
"name",
",",
"required",
"=",
"required",
",",
"default",
"=",
"empty",
")",
"# change datetime.datetime to time, return time.struct_time type",
"if",
"default",
"is",
"not",
"empty",
"and",
"value",
"is",
"empty",
":",
"return",
"default",
"if",
"value",
"is",
"empty",
":",
"raise",
"ValueError",
"(",
"\"`env_timestamp` requires either a default value to be specified, \"",
"\"or for the variable to be present in the environment\"",
")",
"timestamp",
"=",
"float",
"(",
"value",
")",
"return",
"datetime",
".",
"datetime",
".",
"fromtimestamp",
"(",
"timestamp",
")"
] | Pulls an environment variable out of the environment and parses it to a
``datetime.datetime`` object. The environment variable is expected to be a
timestamp in the form of a float.
If the name is not present in the environment and no default is specified
then a ``ValueError`` will be raised.
:param name: The name of the environment variable be pulled
:type name: str
:param required: Whether the environment variable is required. If ``True``
and the variable is not present, a ``KeyError`` is raised.
:type required: bool
:param default: The value to return if the environment variable is not
present. (Providing a default alongside setting ``required=True`` will raise
a ``ValueError``)
:type default: bool | [
"Pulls",
"an",
"environment",
"variable",
"out",
"of",
"the",
"environment",
"and",
"parses",
"it",
"to",
"a",
"datetime",
".",
"datetime",
"object",
".",
"The",
"environment",
"variable",
"is",
"expected",
"to",
"be",
"a",
"timestamp",
"in",
"the",
"form",
"of",
"a",
"float",
"."
] | 2bce66396f0c92fefa2b39ea458965174e478faf | https://github.com/simpleenergy/env-excavator/blob/2bce66396f0c92fefa2b39ea458965174e478faf/excavator/utils.py#L179-L213 |
249,490 | simpleenergy/env-excavator | excavator/utils.py | env_iso8601 | def env_iso8601(name, required=False, default=empty):
"""Pulls an environment variable out of the environment and parses it to a
``datetime.datetime`` object. The environment variable is expected to be an
iso8601 formatted string.
If the name is not present in the environment and no default is specified
then a ``ValueError`` will be raised.
:param name: The name of the environment variable be pulled
:type name: str
:param required: Whether the environment variable is required. If ``True``
and the variable is not present, a ``KeyError`` is raised.
:type required: bool
:param default: The value to return if the environment variable is not
present. (Providing a default alongside setting ``required=True`` will raise
a ``ValueError``)
:type default: bool
"""
try:
import iso8601
except ImportError:
raise ImportError(
'Parsing iso8601 datetime strings requires the iso8601 library'
)
if required and default is not empty:
raise ValueError("Using `default` with `required=True` is invalid")
value = get_env_value(name, required=required, default=empty)
# change datetime.datetime to time, return time.struct_time type
if default is not empty and value is empty:
return default
if value is empty:
raise ValueError(
"`env_iso8601` requires either a default value to be specified, or "
"for the variable to be present in the environment"
)
return iso8601.parse_date(value) | python | def env_iso8601(name, required=False, default=empty):
"""Pulls an environment variable out of the environment and parses it to a
``datetime.datetime`` object. The environment variable is expected to be an
iso8601 formatted string.
If the name is not present in the environment and no default is specified
then a ``ValueError`` will be raised.
:param name: The name of the environment variable be pulled
:type name: str
:param required: Whether the environment variable is required. If ``True``
and the variable is not present, a ``KeyError`` is raised.
:type required: bool
:param default: The value to return if the environment variable is not
present. (Providing a default alongside setting ``required=True`` will raise
a ``ValueError``)
:type default: bool
"""
try:
import iso8601
except ImportError:
raise ImportError(
'Parsing iso8601 datetime strings requires the iso8601 library'
)
if required and default is not empty:
raise ValueError("Using `default` with `required=True` is invalid")
value = get_env_value(name, required=required, default=empty)
# change datetime.datetime to time, return time.struct_time type
if default is not empty and value is empty:
return default
if value is empty:
raise ValueError(
"`env_iso8601` requires either a default value to be specified, or "
"for the variable to be present in the environment"
)
return iso8601.parse_date(value) | [
"def",
"env_iso8601",
"(",
"name",
",",
"required",
"=",
"False",
",",
"default",
"=",
"empty",
")",
":",
"try",
":",
"import",
"iso8601",
"except",
"ImportError",
":",
"raise",
"ImportError",
"(",
"'Parsing iso8601 datetime strings requires the iso8601 library'",
")",
"if",
"required",
"and",
"default",
"is",
"not",
"empty",
":",
"raise",
"ValueError",
"(",
"\"Using `default` with `required=True` is invalid\"",
")",
"value",
"=",
"get_env_value",
"(",
"name",
",",
"required",
"=",
"required",
",",
"default",
"=",
"empty",
")",
"# change datetime.datetime to time, return time.struct_time type",
"if",
"default",
"is",
"not",
"empty",
"and",
"value",
"is",
"empty",
":",
"return",
"default",
"if",
"value",
"is",
"empty",
":",
"raise",
"ValueError",
"(",
"\"`env_iso8601` requires either a default value to be specified, or \"",
"\"for the variable to be present in the environment\"",
")",
"return",
"iso8601",
".",
"parse_date",
"(",
"value",
")"
] | Pulls an environment variable out of the environment and parses it to a
``datetime.datetime`` object. The environment variable is expected to be an
iso8601 formatted string.
If the name is not present in the environment and no default is specified
then a ``ValueError`` will be raised.
:param name: The name of the environment variable be pulled
:type name: str
:param required: Whether the environment variable is required. If ``True``
and the variable is not present, a ``KeyError`` is raised.
:type required: bool
:param default: The value to return if the environment variable is not
present. (Providing a default alongside setting ``required=True`` will raise
a ``ValueError``)
:type default: bool | [
"Pulls",
"an",
"environment",
"variable",
"out",
"of",
"the",
"environment",
"and",
"parses",
"it",
"to",
"a",
"datetime",
".",
"datetime",
"object",
".",
"The",
"environment",
"variable",
"is",
"expected",
"to",
"be",
"an",
"iso8601",
"formatted",
"string",
"."
] | 2bce66396f0c92fefa2b39ea458965174e478faf | https://github.com/simpleenergy/env-excavator/blob/2bce66396f0c92fefa2b39ea458965174e478faf/excavator/utils.py#L216-L255 |
249,491 | KnowledgeLinks/rdfframework | rdfframework/search/esloaders.py | EsRdfBulkLoader._set_es_workers | def _set_es_workers(self, **kwargs):
"""
Creates index worker instances for each class to index
kwargs:
-------
idx_only_base[bool]: True will only index the base class
"""
def make_es_worker(search_conn, es_index, es_doc_type, class_name):
"""
Returns a new es_worker instance
args:
-----
search_conn: the connection to elasticsearch
es_index: the name of the elasticsearch index
es_doc_type: the name of the elasticsearch doctype
class_name: name of the rdf class that is being indexed
"""
new_esbase = copy.copy(search_conn)
new_esbase.es_index = es_index
new_esbase.doc_type = es_doc_type
log.info("Indexing '%s' into ES index '%s' doctype '%s'",
class_name.pyuri,
es_index,
es_doc_type)
return new_esbase
def additional_indexers(rdf_class):
"""
returns additional classes to index based off of the es definitions
"""
rtn_list = rdf_class.es_indexers()
rtn_list.remove(rdf_class)
return rtn_list
self.es_worker = make_es_worker(self.search_conn,
self.es_index,
self.es_doc_type,
self.rdf_class.__name__)
if not kwargs.get("idx_only_base"):
self.other_indexers = {item.__name__: make_es_worker(
self.search_conn,
item.es_defs.get('kds_esIndex')[0],
item.es_defs.get('kds_esDocType')[0],
item.__name__)
for item in additional_indexers(self.rdf_class)}
else:
self.other_indexers = {} | python | def _set_es_workers(self, **kwargs):
"""
Creates index worker instances for each class to index
kwargs:
-------
idx_only_base[bool]: True will only index the base class
"""
def make_es_worker(search_conn, es_index, es_doc_type, class_name):
"""
Returns a new es_worker instance
args:
-----
search_conn: the connection to elasticsearch
es_index: the name of the elasticsearch index
es_doc_type: the name of the elasticsearch doctype
class_name: name of the rdf class that is being indexed
"""
new_esbase = copy.copy(search_conn)
new_esbase.es_index = es_index
new_esbase.doc_type = es_doc_type
log.info("Indexing '%s' into ES index '%s' doctype '%s'",
class_name.pyuri,
es_index,
es_doc_type)
return new_esbase
def additional_indexers(rdf_class):
"""
returns additional classes to index based off of the es definitions
"""
rtn_list = rdf_class.es_indexers()
rtn_list.remove(rdf_class)
return rtn_list
self.es_worker = make_es_worker(self.search_conn,
self.es_index,
self.es_doc_type,
self.rdf_class.__name__)
if not kwargs.get("idx_only_base"):
self.other_indexers = {item.__name__: make_es_worker(
self.search_conn,
item.es_defs.get('kds_esIndex')[0],
item.es_defs.get('kds_esDocType')[0],
item.__name__)
for item in additional_indexers(self.rdf_class)}
else:
self.other_indexers = {} | [
"def",
"_set_es_workers",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"make_es_worker",
"(",
"search_conn",
",",
"es_index",
",",
"es_doc_type",
",",
"class_name",
")",
":",
"\"\"\"\n Returns a new es_worker instance\n\n args:\n -----\n search_conn: the connection to elasticsearch\n es_index: the name of the elasticsearch index\n es_doc_type: the name of the elasticsearch doctype\n class_name: name of the rdf class that is being indexed\n \"\"\"",
"new_esbase",
"=",
"copy",
".",
"copy",
"(",
"search_conn",
")",
"new_esbase",
".",
"es_index",
"=",
"es_index",
"new_esbase",
".",
"doc_type",
"=",
"es_doc_type",
"log",
".",
"info",
"(",
"\"Indexing '%s' into ES index '%s' doctype '%s'\"",
",",
"class_name",
".",
"pyuri",
",",
"es_index",
",",
"es_doc_type",
")",
"return",
"new_esbase",
"def",
"additional_indexers",
"(",
"rdf_class",
")",
":",
"\"\"\"\n returns additional classes to index based off of the es definitions\n \"\"\"",
"rtn_list",
"=",
"rdf_class",
".",
"es_indexers",
"(",
")",
"rtn_list",
".",
"remove",
"(",
"rdf_class",
")",
"return",
"rtn_list",
"self",
".",
"es_worker",
"=",
"make_es_worker",
"(",
"self",
".",
"search_conn",
",",
"self",
".",
"es_index",
",",
"self",
".",
"es_doc_type",
",",
"self",
".",
"rdf_class",
".",
"__name__",
")",
"if",
"not",
"kwargs",
".",
"get",
"(",
"\"idx_only_base\"",
")",
":",
"self",
".",
"other_indexers",
"=",
"{",
"item",
".",
"__name__",
":",
"make_es_worker",
"(",
"self",
".",
"search_conn",
",",
"item",
".",
"es_defs",
".",
"get",
"(",
"'kds_esIndex'",
")",
"[",
"0",
"]",
",",
"item",
".",
"es_defs",
".",
"get",
"(",
"'kds_esDocType'",
")",
"[",
"0",
"]",
",",
"item",
".",
"__name__",
")",
"for",
"item",
"in",
"additional_indexers",
"(",
"self",
".",
"rdf_class",
")",
"}",
"else",
":",
"self",
".",
"other_indexers",
"=",
"{",
"}"
] | Creates index worker instances for each class to index
kwargs:
-------
idx_only_base[bool]: True will only index the base class | [
"Creates",
"index",
"worker",
"instances",
"for",
"each",
"class",
"to",
"index"
] | 9ec32dcc4bed51650a4b392cc5c15100fef7923a | https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/search/esloaders.py#L74-L123 |
249,492 | KnowledgeLinks/rdfframework | rdfframework/search/esloaders.py | EsRdfBulkLoader._index_sub | def _index_sub(self, uri_list, num, batch_num):
"""
Converts a list of uris to elasticsearch json objects
args:
uri_list: list of uris to convert
num: the ending count within the batch
batch_num: the batch number
"""
bname = '%s-%s' % (batch_num, num)
log.debug("batch_num '%s' starting es_json conversion",
bname)
qry_data = get_all_item_data([item[0] for item in uri_list],
self.tstore_conn,
rdfclass=self.rdf_class)
log.debug("batch_num '%s-%s' query_complete | count: %s",
batch_num,
num,
len(qry_data))
# path = os.path.join(CFG.dirs.cache, "index_pre")
# if not os.path.exists(path):
# os.makedirs(path)
# with open(os.path.join(path, bname + ".json"), "w") as fo:
# fo.write(json.dumps(qry_data))
data = RdfDataset(qry_data)
del qry_data
log.debug("batch_num '%s-%s' RdfDataset Loaded", batch_num, num)
for value in uri_list:
try:
self.batch_data[batch_num]['main'].append(\
data[value[0]].es_json())
self.count += 1
except KeyError:
pass
for name, indexer in self.other_indexers.items():
for item in data.json_qry("$.:%s" % name.pyuri):
val = item.es_json()
if val:
self.batch_data[batch_num][name].append(val)
self.batch_uris[batch_num].append(item.subject)
del data
del uri_list
log.debug("batch_num '%s-%s' converted to es_json", batch_num, num) | python | def _index_sub(self, uri_list, num, batch_num):
"""
Converts a list of uris to elasticsearch json objects
args:
uri_list: list of uris to convert
num: the ending count within the batch
batch_num: the batch number
"""
bname = '%s-%s' % (batch_num, num)
log.debug("batch_num '%s' starting es_json conversion",
bname)
qry_data = get_all_item_data([item[0] for item in uri_list],
self.tstore_conn,
rdfclass=self.rdf_class)
log.debug("batch_num '%s-%s' query_complete | count: %s",
batch_num,
num,
len(qry_data))
# path = os.path.join(CFG.dirs.cache, "index_pre")
# if not os.path.exists(path):
# os.makedirs(path)
# with open(os.path.join(path, bname + ".json"), "w") as fo:
# fo.write(json.dumps(qry_data))
data = RdfDataset(qry_data)
del qry_data
log.debug("batch_num '%s-%s' RdfDataset Loaded", batch_num, num)
for value in uri_list:
try:
self.batch_data[batch_num]['main'].append(\
data[value[0]].es_json())
self.count += 1
except KeyError:
pass
for name, indexer in self.other_indexers.items():
for item in data.json_qry("$.:%s" % name.pyuri):
val = item.es_json()
if val:
self.batch_data[batch_num][name].append(val)
self.batch_uris[batch_num].append(item.subject)
del data
del uri_list
log.debug("batch_num '%s-%s' converted to es_json", batch_num, num) | [
"def",
"_index_sub",
"(",
"self",
",",
"uri_list",
",",
"num",
",",
"batch_num",
")",
":",
"bname",
"=",
"'%s-%s'",
"%",
"(",
"batch_num",
",",
"num",
")",
"log",
".",
"debug",
"(",
"\"batch_num '%s' starting es_json conversion\"",
",",
"bname",
")",
"qry_data",
"=",
"get_all_item_data",
"(",
"[",
"item",
"[",
"0",
"]",
"for",
"item",
"in",
"uri_list",
"]",
",",
"self",
".",
"tstore_conn",
",",
"rdfclass",
"=",
"self",
".",
"rdf_class",
")",
"log",
".",
"debug",
"(",
"\"batch_num '%s-%s' query_complete | count: %s\"",
",",
"batch_num",
",",
"num",
",",
"len",
"(",
"qry_data",
")",
")",
"# path = os.path.join(CFG.dirs.cache, \"index_pre\")",
"# if not os.path.exists(path):",
"# os.makedirs(path)",
"# with open(os.path.join(path, bname + \".json\"), \"w\") as fo:",
"# fo.write(json.dumps(qry_data))",
"data",
"=",
"RdfDataset",
"(",
"qry_data",
")",
"del",
"qry_data",
"log",
".",
"debug",
"(",
"\"batch_num '%s-%s' RdfDataset Loaded\"",
",",
"batch_num",
",",
"num",
")",
"for",
"value",
"in",
"uri_list",
":",
"try",
":",
"self",
".",
"batch_data",
"[",
"batch_num",
"]",
"[",
"'main'",
"]",
".",
"append",
"(",
"data",
"[",
"value",
"[",
"0",
"]",
"]",
".",
"es_json",
"(",
")",
")",
"self",
".",
"count",
"+=",
"1",
"except",
"KeyError",
":",
"pass",
"for",
"name",
",",
"indexer",
"in",
"self",
".",
"other_indexers",
".",
"items",
"(",
")",
":",
"for",
"item",
"in",
"data",
".",
"json_qry",
"(",
"\"$.:%s\"",
"%",
"name",
".",
"pyuri",
")",
":",
"val",
"=",
"item",
".",
"es_json",
"(",
")",
"if",
"val",
":",
"self",
".",
"batch_data",
"[",
"batch_num",
"]",
"[",
"name",
"]",
".",
"append",
"(",
"val",
")",
"self",
".",
"batch_uris",
"[",
"batch_num",
"]",
".",
"append",
"(",
"item",
".",
"subject",
")",
"del",
"data",
"del",
"uri_list",
"log",
".",
"debug",
"(",
"\"batch_num '%s-%s' converted to es_json\"",
",",
"batch_num",
",",
"num",
")"
] | Converts a list of uris to elasticsearch json objects
args:
uri_list: list of uris to convert
num: the ending count within the batch
batch_num: the batch number | [
"Converts",
"a",
"list",
"of",
"uris",
"to",
"elasticsearch",
"json",
"objects"
] | 9ec32dcc4bed51650a4b392cc5c15100fef7923a | https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/search/esloaders.py#L125-L168 |
249,493 | KnowledgeLinks/rdfframework | rdfframework/search/esloaders.py | EsRdfBulkLoader.delete_idx_status | def delete_idx_status(self, rdf_class):
"""
Removes all of the index status triples from the datastore
Args:
-----
rdf_class: The class of items to remove the status from
"""
sparql_template = """
DELETE
{{
?s kds:esIndexTime ?esTime .
?s kds:esIndexError ?esError .
}}
WHERE
{{
VALUES ?rdftypes {{\n\t\t{} }} .
?s a ?rdftypes .
OPTIONAL {{
?s kds:esIndexTime ?esTime
}}
OPTIONAL {{
?s kds:esIndexError ?esError
}}
FILTER(bound(?esTime)||bound(?esError))
}}
"""
rdf_types = [rdf_class.uri] + [item.uri
for item in rdf_class.subclasses]
sparql = sparql_template.format("\n\t\t".join(rdf_types))
log.warn("Deleting index status for %s", rdf_class.uri)
return self.tstore_conn.update_query(sparql) | python | def delete_idx_status(self, rdf_class):
"""
Removes all of the index status triples from the datastore
Args:
-----
rdf_class: The class of items to remove the status from
"""
sparql_template = """
DELETE
{{
?s kds:esIndexTime ?esTime .
?s kds:esIndexError ?esError .
}}
WHERE
{{
VALUES ?rdftypes {{\n\t\t{} }} .
?s a ?rdftypes .
OPTIONAL {{
?s kds:esIndexTime ?esTime
}}
OPTIONAL {{
?s kds:esIndexError ?esError
}}
FILTER(bound(?esTime)||bound(?esError))
}}
"""
rdf_types = [rdf_class.uri] + [item.uri
for item in rdf_class.subclasses]
sparql = sparql_template.format("\n\t\t".join(rdf_types))
log.warn("Deleting index status for %s", rdf_class.uri)
return self.tstore_conn.update_query(sparql) | [
"def",
"delete_idx_status",
"(",
"self",
",",
"rdf_class",
")",
":",
"sparql_template",
"=",
"\"\"\"\n DELETE\n {{\n ?s kds:esIndexTime ?esTime .\n ?s kds:esIndexError ?esError .\n }}\n WHERE\n {{\n\n VALUES ?rdftypes {{\\n\\t\\t{} }} .\n ?s a ?rdftypes .\n OPTIONAL {{\n ?s kds:esIndexTime ?esTime\n }}\n OPTIONAL {{\n ?s kds:esIndexError ?esError\n }}\n FILTER(bound(?esTime)||bound(?esError))\n }}\n \"\"\"",
"rdf_types",
"=",
"[",
"rdf_class",
".",
"uri",
"]",
"+",
"[",
"item",
".",
"uri",
"for",
"item",
"in",
"rdf_class",
".",
"subclasses",
"]",
"sparql",
"=",
"sparql_template",
".",
"format",
"(",
"\"\\n\\t\\t\"",
".",
"join",
"(",
"rdf_types",
")",
")",
"log",
".",
"warn",
"(",
"\"Deleting index status for %s\"",
",",
"rdf_class",
".",
"uri",
")",
"return",
"self",
".",
"tstore_conn",
".",
"update_query",
"(",
"sparql",
")"
] | Removes all of the index status triples from the datastore
Args:
-----
rdf_class: The class of items to remove the status from | [
"Removes",
"all",
"of",
"the",
"index",
"status",
"triples",
"from",
"the",
"datastore"
] | 9ec32dcc4bed51650a4b392cc5c15100fef7923a | https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/search/esloaders.py#L430-L463 |
249,494 | KnowledgeLinks/rdfframework | rdfframework/search/esloaders.py | EsRdfBulkLoader.get_es_ids | def get_es_ids(self):
"""
reads all the elasticssearch ids for an index
"""
search = self.search.source(['uri']).sort(['uri'])
es_ids = [item.meta.id for item in search.scan()]
return es_ids | python | def get_es_ids(self):
"""
reads all the elasticssearch ids for an index
"""
search = self.search.source(['uri']).sort(['uri'])
es_ids = [item.meta.id for item in search.scan()]
return es_ids | [
"def",
"get_es_ids",
"(",
"self",
")",
":",
"search",
"=",
"self",
".",
"search",
".",
"source",
"(",
"[",
"'uri'",
"]",
")",
".",
"sort",
"(",
"[",
"'uri'",
"]",
")",
"es_ids",
"=",
"[",
"item",
".",
"meta",
".",
"id",
"for",
"item",
"in",
"search",
".",
"scan",
"(",
")",
"]",
"return",
"es_ids"
] | reads all the elasticssearch ids for an index | [
"reads",
"all",
"the",
"elasticssearch",
"ids",
"for",
"an",
"index"
] | 9ec32dcc4bed51650a4b392cc5c15100fef7923a | https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/search/esloaders.py#L465-L471 |
249,495 | KnowledgeLinks/rdfframework | rdfframework/search/esloaders.py | EsRdfBulkLoader.validate_index | def validate_index(self, rdf_class):
"""
Will compare the triplestore and elasticsearch index to ensure that
that elasticsearch and triplestore items match. elasticsearch records
that are not in the triplestore will be deleteed
"""
es_ids = set(self.get_es_ids())
tstore_ids = set([item[1]
for item in self.get_uri_list(no_status=True)])
diff = es_ids - tstore_ids
if diff:
pdb.set_trace()
action_list = self.es_worker.make_action_list(diff,
action_type="delete")
results = self.es_worker.bulk_save(action_list) | python | def validate_index(self, rdf_class):
"""
Will compare the triplestore and elasticsearch index to ensure that
that elasticsearch and triplestore items match. elasticsearch records
that are not in the triplestore will be deleteed
"""
es_ids = set(self.get_es_ids())
tstore_ids = set([item[1]
for item in self.get_uri_list(no_status=True)])
diff = es_ids - tstore_ids
if diff:
pdb.set_trace()
action_list = self.es_worker.make_action_list(diff,
action_type="delete")
results = self.es_worker.bulk_save(action_list) | [
"def",
"validate_index",
"(",
"self",
",",
"rdf_class",
")",
":",
"es_ids",
"=",
"set",
"(",
"self",
".",
"get_es_ids",
"(",
")",
")",
"tstore_ids",
"=",
"set",
"(",
"[",
"item",
"[",
"1",
"]",
"for",
"item",
"in",
"self",
".",
"get_uri_list",
"(",
"no_status",
"=",
"True",
")",
"]",
")",
"diff",
"=",
"es_ids",
"-",
"tstore_ids",
"if",
"diff",
":",
"pdb",
".",
"set_trace",
"(",
")",
"action_list",
"=",
"self",
".",
"es_worker",
".",
"make_action_list",
"(",
"diff",
",",
"action_type",
"=",
"\"delete\"",
")",
"results",
"=",
"self",
".",
"es_worker",
".",
"bulk_save",
"(",
"action_list",
")"
] | Will compare the triplestore and elasticsearch index to ensure that
that elasticsearch and triplestore items match. elasticsearch records
that are not in the triplestore will be deleteed | [
"Will",
"compare",
"the",
"triplestore",
"and",
"elasticsearch",
"index",
"to",
"ensure",
"that",
"that",
"elasticsearch",
"and",
"triplestore",
"items",
"match",
".",
"elasticsearch",
"records",
"that",
"are",
"not",
"in",
"the",
"triplestore",
"will",
"be",
"deleteed"
] | 9ec32dcc4bed51650a4b392cc5c15100fef7923a | https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/search/esloaders.py#L473-L487 |
249,496 | anti1869/sunhead | src/sunhead/metrics/factory.py | Metrics._disable_prometheus_process_collector | def _disable_prometheus_process_collector(self) -> None:
"""
There is a bug in SDC' Docker implementation and intolerable prometheus_client code, due to which
its process_collector will fail.
See https://github.com/prometheus/client_python/issues/80
"""
logger.info("Removing prometheus process collector")
try:
core.REGISTRY.unregister(PROCESS_COLLECTOR)
except KeyError:
logger.debug("PROCESS_COLLECTOR already removed from prometheus") | python | def _disable_prometheus_process_collector(self) -> None:
"""
There is a bug in SDC' Docker implementation and intolerable prometheus_client code, due to which
its process_collector will fail.
See https://github.com/prometheus/client_python/issues/80
"""
logger.info("Removing prometheus process collector")
try:
core.REGISTRY.unregister(PROCESS_COLLECTOR)
except KeyError:
logger.debug("PROCESS_COLLECTOR already removed from prometheus") | [
"def",
"_disable_prometheus_process_collector",
"(",
"self",
")",
"->",
"None",
":",
"logger",
".",
"info",
"(",
"\"Removing prometheus process collector\"",
")",
"try",
":",
"core",
".",
"REGISTRY",
".",
"unregister",
"(",
"PROCESS_COLLECTOR",
")",
"except",
"KeyError",
":",
"logger",
".",
"debug",
"(",
"\"PROCESS_COLLECTOR already removed from prometheus\"",
")"
] | There is a bug in SDC' Docker implementation and intolerable prometheus_client code, due to which
its process_collector will fail.
See https://github.com/prometheus/client_python/issues/80 | [
"There",
"is",
"a",
"bug",
"in",
"SDC",
"Docker",
"implementation",
"and",
"intolerable",
"prometheus_client",
"code",
"due",
"to",
"which",
"its",
"process_collector",
"will",
"fail",
"."
] | 5117ec797a38eb82d955241d20547d125efe80f3 | https://github.com/anti1869/sunhead/blob/5117ec797a38eb82d955241d20547d125efe80f3/src/sunhead/metrics/factory.py#L63-L74 |
249,497 | rackerlabs/silverberg | silverberg/thrift_client.py | OnDemandThriftClient.connection | def connection(self, handshake=None):
"""
Connects if necessary, returns existing one if it can.
:param handshake: A function to be called with the client
to complete the handshake.
:returns: thrift connection, deferred if necessary
"""
if self._state == _State.CONNECTED:
return succeed(self._current_client)
elif self._state == _State.DISCONNECTING:
return fail(ClientDisconnecting())
elif self._state == _State.NOT_CONNECTED:
d = self._notify_on_connect()
self._connect(handshake)
return d
else:
assert self._state == _State.CONNECTING
return self._notify_on_connect() | python | def connection(self, handshake=None):
"""
Connects if necessary, returns existing one if it can.
:param handshake: A function to be called with the client
to complete the handshake.
:returns: thrift connection, deferred if necessary
"""
if self._state == _State.CONNECTED:
return succeed(self._current_client)
elif self._state == _State.DISCONNECTING:
return fail(ClientDisconnecting())
elif self._state == _State.NOT_CONNECTED:
d = self._notify_on_connect()
self._connect(handshake)
return d
else:
assert self._state == _State.CONNECTING
return self._notify_on_connect() | [
"def",
"connection",
"(",
"self",
",",
"handshake",
"=",
"None",
")",
":",
"if",
"self",
".",
"_state",
"==",
"_State",
".",
"CONNECTED",
":",
"return",
"succeed",
"(",
"self",
".",
"_current_client",
")",
"elif",
"self",
".",
"_state",
"==",
"_State",
".",
"DISCONNECTING",
":",
"return",
"fail",
"(",
"ClientDisconnecting",
"(",
")",
")",
"elif",
"self",
".",
"_state",
"==",
"_State",
".",
"NOT_CONNECTED",
":",
"d",
"=",
"self",
".",
"_notify_on_connect",
"(",
")",
"self",
".",
"_connect",
"(",
"handshake",
")",
"return",
"d",
"else",
":",
"assert",
"self",
".",
"_state",
"==",
"_State",
".",
"CONNECTING",
"return",
"self",
".",
"_notify_on_connect",
"(",
")"
] | Connects if necessary, returns existing one if it can.
:param handshake: A function to be called with the client
to complete the handshake.
:returns: thrift connection, deferred if necessary | [
"Connects",
"if",
"necessary",
"returns",
"existing",
"one",
"if",
"it",
"can",
"."
] | c6fae78923a019f1615e9516ab30fa105c72a542 | https://github.com/rackerlabs/silverberg/blob/c6fae78923a019f1615e9516ab30fa105c72a542/silverberg/thrift_client.py#L184-L204 |
249,498 | pavelsof/ipatok | ipatok/ipa.py | ensure_single_char | def ensure_single_char(func):
"""
Decorator that ensures that the first argument of the decorated function is
a single character, i.e. a string of length one.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
if not isinstance(args[0], str) or len(args[0]) != 1:
raise ValueError((
'This function should be invoked with a string of length one '
'as its first argument'))
return func(*args, **kwargs)
return wrapper | python | def ensure_single_char(func):
"""
Decorator that ensures that the first argument of the decorated function is
a single character, i.e. a string of length one.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
if not isinstance(args[0], str) or len(args[0]) != 1:
raise ValueError((
'This function should be invoked with a string of length one '
'as its first argument'))
return func(*args, **kwargs)
return wrapper | [
"def",
"ensure_single_char",
"(",
"func",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"isinstance",
"(",
"args",
"[",
"0",
"]",
",",
"str",
")",
"or",
"len",
"(",
"args",
"[",
"0",
"]",
")",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"(",
"'This function should be invoked with a string of length one '",
"'as its first argument'",
")",
")",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"wrapper"
] | Decorator that ensures that the first argument of the decorated function is
a single character, i.e. a string of length one. | [
"Decorator",
"that",
"ensures",
"that",
"the",
"first",
"argument",
"of",
"the",
"decorated",
"function",
"is",
"a",
"single",
"character",
"i",
".",
"e",
".",
"a",
"string",
"of",
"length",
"one",
"."
] | fde3c334b8573315fd1073f14341b71f50f7f006 | https://github.com/pavelsof/ipatok/blob/fde3c334b8573315fd1073f14341b71f50f7f006/ipatok/ipa.py#L79-L92 |
249,499 | pavelsof/ipatok | ipatok/ipa.py | is_vowel | def is_vowel(char):
"""
Check whether the character is a vowel letter.
"""
if is_letter(char, strict=True):
return char in chart.vowels
return False | python | def is_vowel(char):
"""
Check whether the character is a vowel letter.
"""
if is_letter(char, strict=True):
return char in chart.vowels
return False | [
"def",
"is_vowel",
"(",
"char",
")",
":",
"if",
"is_letter",
"(",
"char",
",",
"strict",
"=",
"True",
")",
":",
"return",
"char",
"in",
"chart",
".",
"vowels",
"return",
"False"
] | Check whether the character is a vowel letter. | [
"Check",
"whether",
"the",
"character",
"is",
"a",
"vowel",
"letter",
"."
] | fde3c334b8573315fd1073f14341b71f50f7f006 | https://github.com/pavelsof/ipatok/blob/fde3c334b8573315fd1073f14341b71f50f7f006/ipatok/ipa.py#L113-L120 |
Subsets and Splits