Dataset schema (column name, value type, min/max length or number of classes):

    repository_name              string    lengths 5 .. 67
    func_path_in_repository      string    lengths 4 .. 234
    func_name                    string    lengths 0 .. 314
    whole_func_string            string    lengths 52 .. 3.87M
    language                     string    6 classes
    func_code_string             string    lengths 39 .. 1.84M
    func_code_tokens             list      lengths 15 .. 672k
    func_documentation_string    string    lengths 1 .. 47.2k
    func_documentation_tokens    list      lengths 1 .. 3.92k
    split_name                   string    1 class
    func_code_url                string    lengths 85 .. 339
bspaans/python-mingus
mingus/containers/note_container.py
NoteContainer.get_note_names
def get_note_names(self):
    """Return a list with all the note names in the current container.

    Every name will only be mentioned once.
    """
    res = []
    for n in self.notes:
        if n.name not in res:
            res.append(n.name)
    return res
python
Return a list with all the note names in the current container. Every name will only be mentioned once.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/containers/note_container.py#L283-L292
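A minimal usage sketch for get_note_names, assuming Note and NoteContainer are importable from mingus.containers and that a container accepts a list of Note objects; duplicate names across octaves are collapsed:

>>> from mingus.containers import Note, NoteContainer
>>> nc = NoteContainer([Note('C', 4), Note('E', 4), Note('C', 5)])
>>> nc.get_note_names()
['C', 'E']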
bspaans/python-mingus
mingus/core/scales.py
determine
def determine(notes):
    """Determine the scales containing the notes.

    All major and minor scales are recognized.

    Example:
    >>> determine(['A', 'Bb', 'E', 'F#', 'G'])
    ['G melodic minor', 'G Bachian', 'D harmonic major']
    """
    notes = set(notes)
    res = []
    for key in keys:
        for scale in _Scale.__subclasses__():
            if scale.type == 'major':
                if (notes <= set(scale(key[0]).ascending()) or
                        notes <= set(scale(key[0]).descending())):
                    res.append(scale(key[0]).name)
            elif scale.type == 'minor':
                if (notes <= set(scale(get_notes(key[1])[0]).ascending()) or
                        notes <= set(scale(get_notes(key[1])[0]).descending())):
                    res.append(scale(get_notes(key[1])[0]).name)
    return res
python
Determine the scales containing the notes. All major and minor scales are recognized. Example: >>> determine(['A', 'Bb', 'E', 'F#', 'G']) ['G melodic minor', 'G Bachian', 'D harmonic major']
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/scales.py#L60-L82
bspaans/python-mingus
mingus/core/scales.py
_Scale.degree
def degree(self, degree_number, direction='a'):
    """Return the asked scale degree.

    The direction of the scale is 'a' for ascending (default) and 'd'
    for descending.
    """
    if degree_number < 1:
        raise RangeError("degree '%s' out of range" % degree_number)
    if direction == 'a':
        notes = self.ascending()[:-1]
        return notes[degree_number - 1]
    elif direction == 'd':
        # list() is needed here: reversed() returns an iterator,
        # which cannot be sliced directly.
        notes = list(reversed(self.descending()))[:-1]
        return notes[degree_number - 1]
    else:
        raise FormatError("Unrecognised direction '%s'" % direction)
python
Return the asked scale degree. The direction of the scale is 'a' for ascending (default) and 'd' for descending.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/scales.py#L126-L141
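A usage sketch for degree via one of the concrete scale classes, assuming mingus.core.scales provides a Major class; degree numbers are 1-based and ascending is the default direction:

>>> from mingus.core.scales import Major
>>> Major('C').degree(1)
'C'
>>> Major('C').degree(5)
'G'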
bspaans/python-mingus
mingus/extra/tablature.py
begin_track
def begin_track(tuning, padding=2):
    """Helper function that builds the first few characters of every bar."""
    # find longest shorthand tuning base
    names = [x.to_shorthand() for x in tuning.tuning]
    basesize = len(max(names)) + 3

    # Build result
    res = []
    for x in names:
        r = ' %s' % x
        spaces = basesize - len(r)
        r += ' ' * spaces + '||' + '-' * padding
        res.append(r)
    return res
python
Helper function that builds the first few characters of every bar.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/tablature.py#L28-L41
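A sketch of how begin_track is typically driven, assuming the module-level default_tuning (standard guitar) is used; each returned string covers one instrument string and looks roughly like ' E-2  ||--' (shorthand name, padding, '||', padding dashes):

from mingus.extra import tablature

for line in tablature.begin_track(tablature.default_tuning, padding=2):
    print(line)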
bspaans/python-mingus
mingus/extra/tablature.py
add_headers
def add_headers(width=80, title='Untitled', subtitle='', author='', email='',
                description='', tunings=[]):
    """Create a nice header in the form of a list of strings using the
    information that has been filled in.

    All arguments except 'width' and 'tunings' should be strings. 'width'
    should be an integer and 'tunings' a list of tunings representing the
    instruments.
    """
    result = ['']
    title = str.upper(title)
    result += [str.center(' '.join(title), width)]
    if subtitle != '':
        result += ['', str.center(str.title(subtitle), width)]
    if author != '' or email != '':
        result += ['', '']
        if email != '':
            result += [str.center('Written by: %s <%s>' % (author, email),
                       width)]
        else:
            result += [str.center('Written by: %s' % author, width)]
    if description != '':
        result += ['', '']
        words = description.split()
        lines = []
        line = []
        last = 0
        for word in words:
            if len(word) + last < width - 10:
                line.append(word)
                last += len(word) + 1
            else:
                lines.append(line)
                line = [word]
                last = len(word) + 1
        lines.append(line)
        for line in lines:
            result += [str.center(' '.join(line), width)]
    if tunings != []:
        result += ['', '', str.center('Instruments', width)]
        for (i, tuning) in enumerate(tunings):
            result += ['', str.center('%d. %s' % (i + 1, tuning.instrument),
                       width), str.center(tuning.description, width)]
    result += ['', '']
    return result
python
Create a nice header in the form of a list of strings using the information that has been filled in. All arguments except 'width' and 'tunings' should be strings. 'width' should be an integer and 'tunings' a list of tunings representing the instruments.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/tablature.py#L44-L88
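A small sketch of add_headers called on its own (tunings omitted), assuming it is imported from mingus.extra.tablature; note that the title is upper-cased and spaced out letter by letter by ' '.join(title):

import os
from mingus.extra import tablature

header = tablature.add_headers(width=60, title='Example Piece',
                               author='A. Composer', email='a@example.org',
                               description='A short description that gets '
                                           'word-wrapped and centered.')
print(os.linesep.join(header))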
bspaans/python-mingus
mingus/extra/tablature.py
from_Note
def from_Note(note, width=80, tuning=None):
    """Return a string made out of ASCII tablature representing a Note object
    or note string.

    Throw a RangeError if a suitable fret can't be found.

    'tuning' should be a StringTuning object or None for the default tuning.

    To force a certain fingering you can use a 'string' and 'fret' attribute
    on the Note. If the fingering is valid, it will get used instead of the
    default one.
    """
    if tuning is None:
        tuning = default_tuning
    result = begin_track(tuning)
    min = 1000
    (s, f) = (-1, -1)

    # Do an attribute check
    if hasattr(note, 'string') and hasattr(note, 'fret'):
        n = tuning.get_Note(note.string, note.fret)
        if n is not None and int(n) == int(note):
            (s, f) = (note.string, note.fret)
            min = 0
    if min == 1000:
        for (string, fret) in enumerate(tuning.find_frets(note)):
            if fret is not None:
                if fret < min:
                    min = fret
                    (s, f) = (string, fret)
    l = len(result[0])
    w = max(4, (width - l) - 1)

    # Build ASCII
    if min != 1000:
        fret = str(f)
        for i in range(len(result)):
            d = len(fret)
            if i != s:
                result[i] += '-' * w + '|'
            else:
                d = w - len(fret)
                result[i] += '-' * (d / 2) + fret
                d = (w - d / 2) - len(fret)
                result[i] += '-' * d + '|'
    else:
        raise RangeError("No fret found that could play note '%s'. "
                         "Note out of range." % note)
    result.reverse()
    return os.linesep.join(result)
python
Return a string made out of ASCII tablature representing a Note object or note string. Throw a RangeError if a suitable fret can't be found. 'tuning' should be a StringTuning object or None for the default tuning. To force a certain fingering you can use a 'string' and 'fret' attribute on the Note. If the fingering is valid, it will get used instead of the default one.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/tablature.py#L90-L140
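A usage sketch for from_Note with the default tuning, assuming Note is importable from mingus.containers and that string indices follow the tuning order (0 being the lowest string here):

from mingus.containers import Note
from mingus.extra import tablature

# Let the function pick the lowest available fret for the note...
print(tablature.from_Note(Note('D', 3)))

# ...or force a fingering with 'string' and 'fret' attributes; it is only
# honoured when tuning.get_Note(string, fret) matches the note.
n = Note('D', 3)
n.string = 1   # hypothetical choice: second-lowest string, fifth fret
n.fret = 5
print(tablature.from_Note(n))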
bspaans/python-mingus
mingus/extra/tablature.py
from_NoteContainer
def from_NoteContainer(notes, width=80, tuning=None):
    """Return a string made out of ASCII tablature representing a
    NoteContainer object or list of note strings / Note objects.

    Throw a FingerError if no playable fingering can be found.

    'tuning' should be a StringTuning object or None for the default tuning.

    To force a certain fingering you can use a 'string' and 'fret' attribute
    on one or more of the Notes. If the fingering is valid, it will get used
    instead of the default one.
    """
    if tuning is None:
        tuning = default_tuning
    result = begin_track(tuning)
    l = len(result[0])
    w = max(4, (width - l) - 1)
    fingerings = tuning.find_fingering(notes)
    if fingerings != []:
        # Do an attribute check
        f = []
        attr = []
        for note in notes:
            if hasattr(note, 'string') and hasattr(note, 'fret'):
                n = tuning.get_Note(note.string, note.fret)
                if n is not None and int(n) == int(note):
                    f += (note.string, note.fret)
                    attr.append(int(note))

        # See if there are any possible fingerings with the attributes
        # that are set.
        fres = []
        if f != []:
            for x in fingerings:
                found = True
                for pos in f:
                    if pos not in x:
                        found = False
                if found:
                    fres.append(x)

        # Use best fingering.
        if fres != []:
            f = fres[0]
        else:
            # Use default fingering if attributes don't make sense
            f = fingerings[0]

        # Build {string: fret} result
        res = {}
        for (string, fret) in f:
            res[string] = str(fret)
        maxfret = max(res.values())

        # Produce ASCII
        for i in range(len(result)):
            if i not in res.keys():
                result[i] += '-' * w + '|'
            else:
                d = w - len(res[i])
                result[i] += '-' * (d / 2) + res[i]
                d = (w - d / 2) - len(res[i])
                result[i] += '-' * d + '|'
    else:
        raise FingerError('No playable fingering found for: %s' % notes)
    result.reverse()
    return os.linesep.join(result)
python
Return a string made out of ASCII tablature representing a NoteContainer object or list of note strings / Note objects. Throw a FingerError if no playable fingering can be found. 'tuning' should be a StringTuning object or None for the default tuning. To force a certain fingering you can use a 'string' and 'fret' attribute on one or more of the Notes. If the fingering is valid, it will get used instead of the default one.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/tablature.py#L142-L208
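A sketch that tabs a simple chord with the default tuning, assuming NoteContainer accepts a list of Note objects; a FingerError is raised if no playable fingering exists, as documented above:

from mingus.containers import Note, NoteContainer
from mingus.extra import tablature

# A partial E major shape: open low E plus three fretted notes.
chord = NoteContainer([Note('E', 2), Note('B', 2), Note('E', 3), Note('G#', 3)])
print(tablature.from_NoteContainer(chord))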
bspaans/python-mingus
mingus/extra/tablature.py
from_Bar
def from_Bar(bar, width=40, tuning=None, collapse=True):
    """Convert a mingus.containers.Bar object to ASCII tablature.

    Throw a FingerError if no playable fingering can be found.

    'tuning' should be a StringTuning object or None for the default tuning.

    If 'collapse' is False this will return a list of lines, if it's True
    all lines will be concatenated with a newline symbol.

    Use 'string' and 'fret' attributes on Notes to force certain fingerings.
    """
    if tuning is None:
        tuning = default_tuning

    # Size of a quarter note
    qsize = _get_qsize(tuning, width)
    result = begin_track(tuning, max(2, qsize / 2))

    # Add bar
    for entry in bar.bar:
        (beat, duration, notes) = entry
        fingering = tuning.find_fingering(notes)
        if fingering != [] or notes is None:
            # Do an attribute check
            f = []
            attr = []
            if notes is not None:
                for note in notes:
                    if hasattr(note, 'string') and hasattr(note, 'fret'):
                        n = tuning.get_Note(note.string, note.fret)
                        if n is not None and int(n) == int(note):
                            f.append((note.string, note.fret))
                            attr.append(int(note))

            # See if there are any possible fingerings with the attributes
            # that are set.
            fres = []
            if f != []:
                for x in fingering:
                    found = True
                    for pos in f:
                        if pos not in x:
                            found = False
                    if found:
                        fres.append(x)

            # Use best fingering.
            maxlen = 0
            if fres != []:
                f = fres[0]
            else:
                # Use default fingering if attributes don't make sense
                if notes is None:
                    f = []
                    maxlen = 1
                else:
                    f = fingering[0]

            # Make {string: fret} dictionary and find highest fret
            d = {}
            for (string, fret) in f:
                d[string] = str(fret)
                if len(str(fret)) > maxlen:
                    maxlen = len(str(fret))

            # Add to result
            for i in range(len(result)):
                dur = int(((1.0 / duration) * qsize) * 4) - maxlen
                if i not in d.keys():
                    result[i] += '-' * maxlen + '-' * dur
                else:
                    result[i] += ('%' + str(maxlen) + 's') % d[i] + '-' * dur
        else:
            raise FingerError('No playable fingering found for: %s' % notes)

    # Padding at the end
    l = len(result[i]) + 1
    for i in range(len(result)):
        result[i] += (width - l) * '-' + '|'
    result.reverse()

    # Mark quarter notes
    pad = ' ' * int(((1.0 / bar.meter[1]) * qsize) * 4 - 1)
    r = (' ' * (result[0].find('||') + 2 + max(2, qsize / 2))
         + ('*' + pad) * bar.meter[0])
    r += ' ' * (len(result[0]) - len(r))
    if not collapse:
        return [r] + result
    else:
        return os.linesep.join([r] + result)
python
Convert a mingus.containers.Bar object to ASCII tablature. Throw a FingerError if no playable fingering can be found. 'tuning' should be a StringTuning object or None for the default tuning. If 'collapse' is False this will return a list of lines, if it's True all lines will be concatenated with a newline symbol. Use 'string' and 'fret' attributes on Notes to force certain fingerings.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/tablature.py#L210-L300
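A sketch that builds a one-bar tab, assuming Bar() defaults to 4/4 and that Bar.place_notes(note, duration) accepts shorthand note strings:

from mingus.containers import Bar
from mingus.extra import tablature

bar = Bar()
bar.place_notes('E-2', 4)   # four quarter notes
bar.place_notes('A-2', 4)
bar.place_notes('D-3', 4)
bar.place_notes('G-3', 4)
print(tablature.from_Bar(bar, width=40))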
bspaans/python-mingus
mingus/extra/tablature.py
from_Track
def from_Track(track, maxwidth=80, tuning=None):
    """Convert a mingus.containers.Track object to an ASCII tablature string.

    'tuning' should be set to a StringTuning object or to None to use the
    Track's tuning (or alternatively the default if the Track hasn't got its
    own tuning).

    'string' and 'fret' attributes on Notes are taken into account.
    """
    result = []
    width = _get_width(maxwidth)
    if not tuning:
        tuning = track.get_tuning()
    lastlen = 0
    for bar in track:
        r = from_Bar(bar, width, tuning, collapse=False)
        barstart = r[1].find('||') + 2
        if (len(r[0]) + lastlen) - barstart < maxwidth and result != []:
            for i in range(1, len(r) + 1):
                item = r[len(r) - i]
                result[-i] += item[barstart:]
        else:
            result += ['', ''] + r
        lastlen = len(result[-1])
    return os.linesep.join(result)
python
Convert a mingus.containers.Track object to an ASCII tablature string. 'tuning' should be set to a StringTuning object or to None to use the Track's tuning (or alternatively the default if the Track hasn't got its own tuning). 'string' and 'fret' attributes on Notes are taken into account.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/tablature.py#L302-L326
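Continuing the previous sketch: a Track built from such bars, assuming Track.add_bar exists and that a Track without a tuning of its own falls back to the default tuning inside from_Bar:

from mingus.containers import Track
from mingus.extra import tablature

track = Track()
track.add_bar(bar)   # 'bar' as built in the from_Bar sketch above
track.add_bar(bar)
print(tablature.from_Track(track, maxwidth=80))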
bspaans/python-mingus
mingus/extra/tablature.py
from_Composition
def from_Composition(composition, width=80):
    """Convert a mingus.containers.Composition to an ASCII tablature string.

    Automatically add an header based on the title, subtitle, author, e-mail
    and description attributes. An extra description of the piece can also
    be given.

    Tunings can be set by using the Track.instrument.tuning or Track.tuning
    attribute.
    """
    # Collect tunings
    instr_tunings = []
    for track in composition:
        tun = track.get_tuning()
        if tun:
            instr_tunings.append(tun)
        else:
            instr_tunings.append(default_tuning)
    result = add_headers(
        width,
        composition.title,
        composition.subtitle,
        composition.author,
        composition.email,
        composition.description,
        instr_tunings,
    )

    # Some variables
    w = _get_width(width)
    barindex = 0
    bars = width / w
    lastlen = 0
    maxlen = max([len(x) for x in composition.tracks])

    while barindex < maxlen:
        notfirst = False
        for tracks in composition:
            tuning = tracks.get_tuning()
            ascii = []
            for x in xrange(bars):
                if barindex + x < len(tracks):
                    bar = tracks[barindex + x]
                    r = from_Bar(bar, w, tuning, collapse=False)
                    barstart = r[1].find('||') + 2

                    # Add extra '||' to quarter note marks to connect tracks.
                    if notfirst:
                        r[0] = (r[0])[:barstart - 2] + '||' + (r[0])[barstart:]

                    # Add bar to ascii
                    if ascii != []:
                        for i in range(1, len(r) + 1):
                            item = r[len(r) - i]
                            ascii[-i] += item[barstart:]
                    else:
                        ascii += r

            # Add extra '||' to connect tracks
            if notfirst and ascii != []:
                pad = ascii[-1].find('||')
                result += [' ' * pad + '||', ' ' * pad + '||']
            else:
                notfirst = True

            # Finally, add ascii to result
            result += ascii
        result += ['', '', '']
        barindex += bars
    return os.linesep.join(result)
python
Convert a mingus.containers.Composition to an ASCII tablature string. Automatically add an header based on the title, subtitle, author, e-mail and description attributes. An extra description of the piece can also be given. Tunings can be set by using the Track.instrument.tuning or Track.tuning attribute.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/tablature.py#L328-L397
bspaans/python-mingus
mingus/extra/tablature.py
from_Suite
def from_Suite(suite, maxwidth=80):
    """Convert a mingus.containers.Suite to an ASCII tablature string,
    complete with headers.

    This function makes use of the Suite's title, subtitle, author, email
    and description attributes.
    """
    subtitle = (str(len(suite.compositions)) + ' Compositions'
                if suite.subtitle == '' else suite.subtitle)
    result = os.linesep.join(add_headers(
        maxwidth,
        suite.title,
        subtitle,
        suite.author,
        suite.email,
        suite.description,
    ))
    hr = maxwidth * '='
    n = os.linesep
    result = n + hr + n + result + n + hr + n + n
    for comp in suite:
        c = from_Composition(comp, maxwidth)
        result += c + n + hr + n + n
    return result
python
Convert a mingus.containers.Suite to an ASCII tablature string, complete with headers. This function makes use of the Suite's title, subtitle, author, email and description attributes.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/tablature.py#L399-L422
bspaans/python-mingus
mingus/extra/tablature.py
_get_qsize
def _get_qsize(tuning, width):
    """Return a reasonable quarter note size for 'tuning' and 'width'."""
    names = [x.to_shorthand() for x in tuning.tuning]
    basesize = len(max(names)) + 3
    barsize = ((width - basesize) - 2) - 1

    # x * 4 + 0.5x - barsize = 0
    # 4.5x = barsize
    # x = barsize / 4.5
    return max(0, int(barsize / 4.5))
python
Return a reasonable quarter note size for 'tuning' and 'width'.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/tablature.py#L424-L431
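A worked example of the sizing arithmetic, assuming a six-string tuning whose shorthand names are all three characters (e.g. 'E-2'); note that len(max(names)) measures the lexicographically greatest name, which only equals the longest one when all names have the same length:

# For width=40 and three-character names like 'E-2':
#   basesize = len(max(names)) + 3            -> 3 + 3 = 6
#   barsize  = ((40 - 6) - 2) - 1             -> 31
#   qsize    = max(0, int(31 / 4.5))          -> 6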
bspaans/python-mingus
mingus/extra/tablature.py
_get_width
def _get_width(maxwidth):
    """Return the width of a single bar, when width of the page is given."""
    width = maxwidth / 3
    if maxwidth <= 60:
        width = maxwidth
    elif 60 < maxwidth <= 120:
        width = maxwidth / 2
    return width
python
Return the width of a single bar, when width of the page is given.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/tablature.py#L433-L440
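The three branches as a quick check (Python 2 integer division, so results truncate):

>>> _get_width(50)    # <= 60: one bar spans the whole page
50
>>> _get_width(80)    # 60 < maxwidth <= 120: two bars per line
40
>>> _get_width(150)   # wider pages: three bars per line
50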
bspaans/python-mingus
mingus/core/chords.py
triad
def triad(note, key):
    """Return the triad on note in key as a list.

    Examples:
    >>> triad('E', 'C')
    ['E', 'G', 'B']
    >>> triad('E', 'B')
    ['E', 'G#', 'B']
    """
    return [note, intervals.third(note, key), intervals.fifth(note, key)]
python
Return the triad on note in key as a list. Examples: >>> triad('E', 'C') ['E', 'G', 'B'] >>> triad('E', 'B') ['E', 'G#', 'B']
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L165-L174
bspaans/python-mingus
mingus/core/chords.py
triads
def triads(key):
    """Return all the triads in key.

    Implemented using a cache.
    """
    if _triads_cache.has_key(key):
        return _triads_cache[key]
    res = map(lambda x: triad(x, key), keys.get_notes(key))
    _triads_cache[key] = res
    return res
python
Return all the triads in key. Implemented using a cache.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L176-L185
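A usage sketch for triads, assuming it is called through mingus.core.chords; the result is computed once per key and then served from _triads_cache:

>>> from mingus.core import chords
>>> chords.triads('C')
[['C', 'E', 'G'], ['D', 'F', 'A'], ['E', 'G', 'B'], ['F', 'A', 'C'], ['G', 'B', 'D'], ['A', 'C', 'E'], ['B', 'D', 'F']]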
bspaans/python-mingus
mingus/core/chords.py
augmented_triad
def augmented_triad(note):
    """Build an augmented triad on note.

    Example:
    >>> augmented_triad('C')
    ['C', 'E', 'G#']
    """
    return [note, intervals.major_third(note),
            notes.augment(intervals.major_fifth(note))]
python
Build an augmented triad on note. Example: >>> augmented_triad('C') ['C', 'E', 'G#']
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L214-L222
bspaans/python-mingus
mingus/core/chords.py
seventh
def seventh(note, key):
    """Return the seventh chord on note in key.

    Example:
    >>> seventh('C', 'C')
    ['C', 'E', 'G', 'B']
    """
    return triad(note, key) + [intervals.seventh(note, key)]
python
Return the seventh chord on note in key. Example: >>> seventh('C', 'C') ['C', 'E', 'G', 'B']
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L224-L231
bspaans/python-mingus
mingus/core/chords.py
sevenths
def sevenths(key):
    """Return all the sevenths chords in key in a list."""
    if _sevenths_cache.has_key(key):
        return _sevenths_cache[key]
    res = map(lambda x: seventh(x, key), keys.get_notes(key))
    _sevenths_cache[key] = res
    return res
python
Return all the sevenths chords in key in a list.
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L233-L239
bspaans/python-mingus
mingus/core/chords.py
dominant_flat_ninth
def dominant_flat_ninth(note):
    """Build a dominant flat ninth chord on note.

    Example:
    >>> dominant_flat_ninth('C')
    ['C', 'E', 'G', 'Bb', 'Db']
    """
    res = dominant_ninth(note)
    res[4] = intervals.minor_second(note)
    return res
python
Build a dominant flat ninth chord on note. Example: >>> dominant_flat_ninth('C') ['C', 'E', 'G', 'Bb', 'Db']
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L368-L377
bspaans/python-mingus
mingus/core/chords.py
dominant_sharp_ninth
def dominant_sharp_ninth(note):
    """Build a dominant sharp ninth chord on note.

    Example:
    >>> dominant_sharp_ninth('C')
    ['C', 'E', 'G', 'Bb', 'D#']
    """
    res = dominant_ninth(note)
    res[4] = notes.augment(intervals.major_second(note))
    return res
python
Build a dominant sharp ninth chord on note. Example: >>> dominant_sharp_ninth('C') ['C', 'E', 'G', 'Bb', 'D#']
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L379-L388
bspaans/python-mingus
mingus/core/chords.py
eleventh
def eleventh(note):
    """Build an eleventh chord on note.

    Example:
    >>> eleventh('C')
    ['C', 'G', 'Bb', 'F']
    """
    return [note, intervals.perfect_fifth(note),
            intervals.minor_seventh(note), intervals.perfect_fourth(note)]
python
Build an eleventh chord on note. Example: >>> eleventh('C') ['C', 'G', 'Bb', 'F']
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L390-L398
bspaans/python-mingus
mingus/core/chords.py
dominant_flat_five
def dominant_flat_five(note):
    """Build a dominant flat five chord on note.

    Example:
    >>> dominant_flat_five('C')
    ['C', 'E', 'Gb', 'Bb']
    """
    res = dominant_seventh(note)
    res[2] = notes.diminish(res[2])
    return res
python
Build a dominant flat five chord on note. Example: >>> dominant_flat_five('C') ['C', 'E', 'Gb', 'Bb']
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L494-L503
bspaans/python-mingus
mingus/core/chords.py
from_shorthand
def from_shorthand(shorthand_string, slash=None):
    """Take a chord written in shorthand and return the notes in the chord.

    The function can recognize triads, sevenths, sixths, ninths, elevenths,
    thirteenths, slashed chords and a number of altered chords.

    The second argument should not be given and is only used for a recursive
    call when a slashed chord or polychord is found.

    See http://tinyurl.com/3hn6v8u for a nice overview of chord patterns.

    Examples:
    >>> from_shorthand('Amin')
    ['A', 'C', 'E']
    >>> from_shorthand('Am/M7')
    ['A', 'C', 'E', 'G#']
    >>> from_shorthand('A')
    ['A', 'C#', 'E']
    >>> from_shorthand('A/G')
    ['G', 'A', 'C#', 'E']
    >>> from_shorthand('Dm|G')
    ['G', 'B', 'D', 'F', 'A']

    Recognised abbreviations: the letters "m" and "M" in the following
    abbreviations can always be substituted by respectively "min", "mi" or
    "-" and "maj" or "ma".

    Example:
    >>> from_shorthand('Amin7') == from_shorthand('Am7')
    True

    Triads: 'm', 'M' or '', 'dim'
    Sevenths: 'm7', 'M7', '7', 'm7b5', 'dim7', 'm/M7' or 'mM7'
    Augmented chords: 'aug' or '+', '7#5' or 'M7+5', 'M7+', 'm7+', '7+'
    Suspended chords: 'sus4', 'sus2', 'sus47' or '7sus4', 'sus', '11',
        'sus4b9' or 'susb9'
    Sixths: '6', 'm6', 'M6', '6/7' or '67', '6/9' or '69'
    Ninths: '9' or 'add9', 'M9', 'm9', '7b9', '7#9'
    Elevenths: '11' or 'add11', '7#11', 'm11'
    Thirteenths: '13' or 'add13', 'M13', 'm13'
    Altered chords: '7b5', '7b9', '7#9', '67' or '6/7'
    Special: '5', 'NC', 'hendrix'
    """
    # warning reduce??
    if type(shorthand_string) == list:
        res = []
        for x in shorthand_string:
            res.append(from_shorthand(x))
        return res
    if shorthand_string in ['NC', 'N.C.']:
        return []

    # Shrink shorthand_string to a format recognised by chord_shorthand
    shorthand_string = shorthand_string.replace('min', 'm')
    shorthand_string = shorthand_string.replace('mi', 'm')
    shorthand_string = shorthand_string.replace('-', 'm')
    shorthand_string = shorthand_string.replace('maj', 'M')
    shorthand_string = shorthand_string.replace('ma', 'M')

    # Get the note name
    if not notes.is_valid_note(shorthand_string[0]):
        raise NoteFormatError, "Unrecognised note '%s' in chord '%s'" \
            % (shorthand_string[0], shorthand_string)
    name = shorthand_string[0]

    # Look for accidentals
    for n in shorthand_string[1:]:
        if n == '#':
            name += n
        elif n == 'b':
            name += n
        else:
            break

    # Look for slashes and polychords '|'
    slash_index = -1
    s = 0
    rest_of_string = shorthand_string[len(name):]
    for n in rest_of_string:
        if n == '/':
            slash_index = s
        elif n == '|':
            # Generate polychord
            return from_shorthand(shorthand_string[:len(name) + s],
                    from_shorthand(shorthand_string[len(name) + s + 1:]))
        s += 1

    # Generate slash chord
    if slash_index != -1 and rest_of_string not in ['m/M7', '6/9', '6/7']:
        res = shorthand_string[:len(name) + slash_index]
        return from_shorthand(shorthand_string[:len(name) + slash_index],
                shorthand_string[len(name) + slash_index + 1:])
    shorthand_start = len(name)
    short_chord = shorthand_string[shorthand_start:]
    if chord_shorthand.has_key(short_chord):
        res = chord_shorthand[short_chord](name)
        if slash != None:
            # Add slashed chords
            if type(slash) == str:
                if notes.is_valid_note(slash):
                    res = [slash] + res
                else:
                    raise NoteFormatError, \
                        "Unrecognised note '%s' in slash chord'%s'" % (slash,
                            slash + shorthand_string)
            elif type(slash) == list:
                # Add polychords
                r = slash
                for n in res:
                    if n != r[-1]:
                        r.append(n)
                return r
        return res
    else:
        raise FormatError, 'Unknown shorthand: %s' % shorthand_string
python
def from_shorthand(shorthand_string, slash=None): if type(shorthand_string) == list: res = [] for x in shorthand_string: res.append(from_shorthand(x)) return res if shorthand_string in ['NC', 'N.C.']: return [] shorthand_string = shorthand_string.replace('min', 'm') shorthand_string = shorthand_string.replace('mi', 'm') shorthand_string = shorthand_string.replace('-', 'm') shorthand_string = shorthand_string.replace('maj', 'M') shorthand_string = shorthand_string.replace('ma', 'M') if not notes.is_valid_note(shorthand_string[0]): raise NoteFormatError, "Unrecognised note '%s' in chord '%s'"\ % (shorthand_string[0], shorthand_string) name = shorthand_string[0] for n in shorthand_string[1:]: if n == '#': name += n elif n == 'b': name += n else: break slash_index = -1 s = 0 rest_of_string = shorthand_string[len(name):] for n in rest_of_string: if n == '/': slash_index = s elif n == '|': return from_shorthand(shorthand_string[:len(name) + s], from_shorthand(shorthand_string[len(name) + s + 1:])) s += 1 if slash_index != -1 and rest_of_string not in ['m/M7', '6/9', '6/7']: res = shorthand_string[:len(name) + slash_index] return from_shorthand(shorthand_string[:len(name) + slash_index], shorthand_string[len(name) + slash_index + 1:]) shorthand_start = len(name) short_chord = shorthand_string[shorthand_start:] if chord_shorthand.has_key(short_chord): res = chord_shorthand[short_chord](name) if slash != None: if type(slash) == str: if notes.is_valid_note(slash): res = [slash] + res else: raise NoteFormatError, \ "Unrecognised note '%s' in slash chord'%s'" % (slash, slash + shorthand_string) elif type(slash) == list: r = slash for n in res: if n != r[-1]: r.append(n) return r return res else: raise FormatError, 'Unknown shorthand: %s' % shorthand_string
[ "def", "from_shorthand", "(", "shorthand_string", ",", "slash", "=", "None", ")", ":", "# warning reduce??", "if", "type", "(", "shorthand_string", ")", "==", "list", ":", "res", "=", "[", "]", "for", "x", "in", "shorthand_string", ":", "res", ".", "append", "(", "from_shorthand", "(", "x", ")", ")", "return", "res", "if", "shorthand_string", "in", "[", "'NC'", ",", "'N.C.'", "]", ":", "return", "[", "]", "# Shrink shorthand_string to a format recognised by chord_shorthand", "shorthand_string", "=", "shorthand_string", ".", "replace", "(", "'min'", ",", "'m'", ")", "shorthand_string", "=", "shorthand_string", ".", "replace", "(", "'mi'", ",", "'m'", ")", "shorthand_string", "=", "shorthand_string", ".", "replace", "(", "'-'", ",", "'m'", ")", "shorthand_string", "=", "shorthand_string", ".", "replace", "(", "'maj'", ",", "'M'", ")", "shorthand_string", "=", "shorthand_string", ".", "replace", "(", "'ma'", ",", "'M'", ")", "# Get the note name", "if", "not", "notes", ".", "is_valid_note", "(", "shorthand_string", "[", "0", "]", ")", ":", "raise", "NoteFormatError", ",", "\"Unrecognised note '%s' in chord '%s'\"", "%", "(", "shorthand_string", "[", "0", "]", ",", "shorthand_string", ")", "name", "=", "shorthand_string", "[", "0", "]", "# Look for accidentals", "for", "n", "in", "shorthand_string", "[", "1", ":", "]", ":", "if", "n", "==", "'#'", ":", "name", "+=", "n", "elif", "n", "==", "'b'", ":", "name", "+=", "n", "else", ":", "break", "# Look for slashes and polychords '|'", "slash_index", "=", "-", "1", "s", "=", "0", "rest_of_string", "=", "shorthand_string", "[", "len", "(", "name", ")", ":", "]", "for", "n", "in", "rest_of_string", ":", "if", "n", "==", "'/'", ":", "slash_index", "=", "s", "elif", "n", "==", "'|'", ":", "# Generate polychord", "return", "from_shorthand", "(", "shorthand_string", "[", ":", "len", "(", "name", ")", "+", "s", "]", ",", "from_shorthand", "(", "shorthand_string", "[", "len", "(", "name", ")", "+", "s", "+", "1", ":", "]", ")", ")", "s", "+=", "1", "# Generate slash chord", "if", "slash_index", "!=", "-", "1", "and", "rest_of_string", "not", "in", "[", "'m/M7'", ",", "'6/9'", ",", "'6/7'", "]", ":", "res", "=", "shorthand_string", "[", ":", "len", "(", "name", ")", "+", "slash_index", "]", "return", "from_shorthand", "(", "shorthand_string", "[", ":", "len", "(", "name", ")", "+", "slash_index", "]", ",", "shorthand_string", "[", "len", "(", "name", ")", "+", "slash_index", "+", "1", ":", "]", ")", "shorthand_start", "=", "len", "(", "name", ")", "short_chord", "=", "shorthand_string", "[", "shorthand_start", ":", "]", "if", "chord_shorthand", ".", "has_key", "(", "short_chord", ")", ":", "res", "=", "chord_shorthand", "[", "short_chord", "]", "(", "name", ")", "if", "slash", "!=", "None", ":", "# Add slashed chords", "if", "type", "(", "slash", ")", "==", "str", ":", "if", "notes", ".", "is_valid_note", "(", "slash", ")", ":", "res", "=", "[", "slash", "]", "+", "res", "else", ":", "raise", "NoteFormatError", ",", "\"Unrecognised note '%s' in slash chord'%s'\"", "%", "(", "slash", ",", "slash", "+", "shorthand_string", ")", "elif", "type", "(", "slash", ")", "==", "list", ":", "# Add polychords", "r", "=", "slash", "for", "n", "in", "res", ":", "if", "n", "!=", "r", "[", "-", "1", "]", ":", "r", ".", "append", "(", "n", ")", "return", "r", "return", "res", "else", ":", "raise", "FormatError", ",", "'Unknown shorthand: %s'", "%", "shorthand_string" ]
Take a chord written in shorthand and return the notes in the chord. The function can recognize triads, sevenths, sixths, ninths, elevenths, thirteenths, slashed chords and a number of altered chords. The second argument should not be given and is only used for a recursive call when a slashed chord or polychord is found. See http://tinyurl.com/3hn6v8u for a nice overview of chord patterns. Examples: >>> from_shorthand('Amin') ['A', 'C', 'E'] >>> from_shorthand('Am/M7') ['A', 'C', 'E', 'G#'] >>> from_shorthand('A') ['A', 'C#', 'E'] >>> from_shorthand('A/G') ['G', 'A', 'C#', 'E'] >>> from_shorthand('Dm|G') ['G', 'B', 'D', 'F', 'A'] Recognised abbreviations: the letters "m" and "M" in the following abbreviations can always be substituted by respectively "min", "mi" or "-" and "maj" or "ma". Example: >>> from_shorthand('Amin7') == from_shorthand('Am7') True Triads: 'm', 'M' or '', 'dim' Sevenths: 'm7', 'M7', '7', 'm7b5', 'dim7', 'm/M7' or 'mM7' Augmented chords: 'aug' or '+', '7#5' or 'M7+5', 'M7+', 'm7+', '7+' Suspended chords: 'sus4', 'sus2', 'sus47' or '7sus4', 'sus', '11', 'sus4b9' or 'susb9' Sixths: '6', 'm6', 'M6', '6/7' or '67', '6/9' or '69' Ninths: '9' or 'add9', 'M9', 'm9', '7b9', '7#9' Elevenths: '11' or 'add11', '7#11', 'm11' Thirteenths: '13' or 'add13', 'M13', 'm13' Altered chords: '7b5', '7b9', '7#9', '67' or '6/7' Special: '5', 'NC', 'hendrix'
[ "Take", "a", "chord", "written", "in", "shorthand", "and", "return", "the", "notes", "in", "the", "chord", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L699-L823
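A hedged usage sketch for the from_shorthand record above. The import path follows this record's func_path_in_repository, and the expected outputs are copied from the docstring rather than independently verified.

from mingus.core import chords

print(chords.from_shorthand('Amin'))   # ['A', 'C', 'E'] per the docstring
print(chords.from_shorthand('A/G'))    # slash chord: ['G', 'A', 'C#', 'E']
print(chords.from_shorthand('Dm|G'))   # polychord: ['G', 'B', 'D', 'F', 'A']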
bspaans/python-mingus
mingus/core/chords.py
determine
def determine(chord, shorthand=False, no_inversions=False, no_polychords=False): """Name a chord. This function can determine almost every chord, from a simple triad to a fourteen note polychord.""" if chord == []: return [] elif len(chord) == 1: return chord elif len(chord) == 2: return [intervals.determine(chord[0], chord[1])] elif len(chord) == 3: return determine_triad(chord, shorthand, no_inversions, no_polychords) elif len(chord) == 4: return determine_seventh(chord, shorthand, no_inversions, no_polychords) elif len(chord) == 5: return determine_extended_chord5(chord, shorthand, no_inversions, no_polychords) elif len(chord) == 6: return determine_extended_chord6(chord, shorthand, no_inversions, no_polychords) elif len(chord) == 7: return determine_extended_chord7(chord, shorthand, no_inversions, no_polychords) else: return determine_polychords(chord, shorthand)
python
def determine(chord, shorthand=False, no_inversions=False, no_polychords=False): if chord == []: return [] elif len(chord) == 1: return chord elif len(chord) == 2: return [intervals.determine(chord[0], chord[1])] elif len(chord) == 3: return determine_triad(chord, shorthand, no_inversions, no_polychords) elif len(chord) == 4: return determine_seventh(chord, shorthand, no_inversions, no_polychords) elif len(chord) == 5: return determine_extended_chord5(chord, shorthand, no_inversions, no_polychords) elif len(chord) == 6: return determine_extended_chord6(chord, shorthand, no_inversions, no_polychords) elif len(chord) == 7: return determine_extended_chord7(chord, shorthand, no_inversions, no_polychords) else: return determine_polychords(chord, shorthand)
[ "def", "determine", "(", "chord", ",", "shorthand", "=", "False", ",", "no_inversions", "=", "False", ",", "no_polychords", "=", "False", ")", ":", "if", "chord", "==", "[", "]", ":", "return", "[", "]", "elif", "len", "(", "chord", ")", "==", "1", ":", "return", "chord", "elif", "len", "(", "chord", ")", "==", "2", ":", "return", "[", "intervals", ".", "determine", "(", "chord", "[", "0", "]", ",", "chord", "[", "1", "]", ")", "]", "elif", "len", "(", "chord", ")", "==", "3", ":", "return", "determine_triad", "(", "chord", ",", "shorthand", ",", "no_inversions", ",", "no_polychords", ")", "elif", "len", "(", "chord", ")", "==", "4", ":", "return", "determine_seventh", "(", "chord", ",", "shorthand", ",", "no_inversions", ",", "no_polychords", ")", "elif", "len", "(", "chord", ")", "==", "5", ":", "return", "determine_extended_chord5", "(", "chord", ",", "shorthand", ",", "no_inversions", ",", "no_polychords", ")", "elif", "len", "(", "chord", ")", "==", "6", ":", "return", "determine_extended_chord6", "(", "chord", ",", "shorthand", ",", "no_inversions", ",", "no_polychords", ")", "elif", "len", "(", "chord", ")", "==", "7", ":", "return", "determine_extended_chord7", "(", "chord", ",", "shorthand", ",", "no_inversions", ",", "no_polychords", ")", "else", ":", "return", "determine_polychords", "(", "chord", ",", "shorthand", ")" ]
Name a chord. This function can determine almost every chord, from a simple triad to a fourteen note polychord.
[ "Name", "a", "chord", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L825-L850
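A minimal sketch of how determine dispatches on chord length, for the record above. The expected names are taken from the docstrings of determine_triad and determine_seventh (the next records), so treat them as illustrative rather than guaranteed output.

from mingus.core import chords

# Three notes are routed to determine_triad, four to determine_seventh, and so on.
print(chords.determine(['A', 'C', 'E']))       # expected to include 'A minor triad'
print(chords.determine(['C', 'E', 'G', 'B']))  # expected to include 'C major seventh'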
bspaans/python-mingus
mingus/core/chords.py
determine_triad
def determine_triad(triad, shorthand=False, no_inversions=False, placeholder=None): """Name the triad; return answers in a list. The third argument should not be given. If shorthand is True the answers will be in abbreviated form. This function can determine major, minor, diminished and suspended triads. Also knows about invertions. Examples: >>> determine_triad(['A', 'C', 'E']) 'A minor triad' >>> determine_triad(['C', 'E', 'A']) 'A minor triad, first inversion' >>> determine_triad(['A', 'C', 'E'], True) 'Am' """ if len(triad) != 3: # warning: raise exception: not a triad return False def inversion_exhauster(triad, shorthand, tries, result): """Run tries every inversion and save the result.""" intval1 = intervals.determine(triad[0], triad[1], True) intval2 = intervals.determine(triad[0], triad[2], True) def add_result(short): result.append((short, tries, triad[0])) intval = intval1 + intval2 if intval == '25': add_result('sus2') elif intval == '3b7': add_result('dom7') # changed from just '7' elif intval == '3b5': add_result('7b5') # why not b5? elif intval == '35': add_result('M') elif intval == '3#5': add_result('aug') elif intval == '36': add_result('M6') elif intval == '37': add_result('M7') elif intval == 'b3b5': add_result('dim') elif intval == 'b35': add_result('m') elif intval == 'b36': add_result('m6') elif intval == 'b3b7': add_result('m7') elif intval == 'b37': add_result('m/M7') elif intval == '45': add_result('sus4') elif intval == '5b7': add_result('m7') elif intval == '57': add_result('M7') if tries != 3 and not no_inversions: return inversion_exhauster([triad[-1]] + triad[:-1], shorthand, tries + 1, result) else: res = [] for r in result: if shorthand: res.append(r[2] + r[0]) else: res.append(r[2] + chord_shorthand_meaning[r[0]] + int_desc(r[1])) return res return inversion_exhauster(triad, shorthand, 1, [])
python
def determine_triad(triad, shorthand=False, no_inversions=False, placeholder=None): if len(triad) != 3: return False def inversion_exhauster(triad, shorthand, tries, result): intval1 = intervals.determine(triad[0], triad[1], True) intval2 = intervals.determine(triad[0], triad[2], True) def add_result(short): result.append((short, tries, triad[0])) intval = intval1 + intval2 if intval == '25': add_result('sus2') elif intval == '3b7': add_result('dom7') elif intval == '3b5': add_result('7b5') elif intval == '35': add_result('M') elif intval == '3#5': add_result('aug') elif intval == '36': add_result('M6') elif intval == '37': add_result('M7') elif intval == 'b3b5': add_result('dim') elif intval == 'b35': add_result('m') elif intval == 'b36': add_result('m6') elif intval == 'b3b7': add_result('m7') elif intval == 'b37': add_result('m/M7') elif intval == '45': add_result('sus4') elif intval == '5b7': add_result('m7') elif intval == '57': add_result('M7') if tries != 3 and not no_inversions: return inversion_exhauster([triad[-1]] + triad[:-1], shorthand, tries + 1, result) else: res = [] for r in result: if shorthand: res.append(r[2] + r[0]) else: res.append(r[2] + chord_shorthand_meaning[r[0]] + int_desc(r[1])) return res return inversion_exhauster(triad, shorthand, 1, [])
[ "def", "determine_triad", "(", "triad", ",", "shorthand", "=", "False", ",", "no_inversions", "=", "False", ",", "placeholder", "=", "None", ")", ":", "if", "len", "(", "triad", ")", "!=", "3", ":", "# warning: raise exception: not a triad", "return", "False", "def", "inversion_exhauster", "(", "triad", ",", "shorthand", ",", "tries", ",", "result", ")", ":", "\"\"\"Run tries every inversion and save the result.\"\"\"", "intval1", "=", "intervals", ".", "determine", "(", "triad", "[", "0", "]", ",", "triad", "[", "1", "]", ",", "True", ")", "intval2", "=", "intervals", ".", "determine", "(", "triad", "[", "0", "]", ",", "triad", "[", "2", "]", ",", "True", ")", "def", "add_result", "(", "short", ")", ":", "result", ".", "append", "(", "(", "short", ",", "tries", ",", "triad", "[", "0", "]", ")", ")", "intval", "=", "intval1", "+", "intval2", "if", "intval", "==", "'25'", ":", "add_result", "(", "'sus2'", ")", "elif", "intval", "==", "'3b7'", ":", "add_result", "(", "'dom7'", ")", "# changed from just '7'", "elif", "intval", "==", "'3b5'", ":", "add_result", "(", "'7b5'", ")", "# why not b5?", "elif", "intval", "==", "'35'", ":", "add_result", "(", "'M'", ")", "elif", "intval", "==", "'3#5'", ":", "add_result", "(", "'aug'", ")", "elif", "intval", "==", "'36'", ":", "add_result", "(", "'M6'", ")", "elif", "intval", "==", "'37'", ":", "add_result", "(", "'M7'", ")", "elif", "intval", "==", "'b3b5'", ":", "add_result", "(", "'dim'", ")", "elif", "intval", "==", "'b35'", ":", "add_result", "(", "'m'", ")", "elif", "intval", "==", "'b36'", ":", "add_result", "(", "'m6'", ")", "elif", "intval", "==", "'b3b7'", ":", "add_result", "(", "'m7'", ")", "elif", "intval", "==", "'b37'", ":", "add_result", "(", "'m/M7'", ")", "elif", "intval", "==", "'45'", ":", "add_result", "(", "'sus4'", ")", "elif", "intval", "==", "'5b7'", ":", "add_result", "(", "'m7'", ")", "elif", "intval", "==", "'57'", ":", "add_result", "(", "'M7'", ")", "if", "tries", "!=", "3", "and", "not", "no_inversions", ":", "return", "inversion_exhauster", "(", "[", "triad", "[", "-", "1", "]", "]", "+", "triad", "[", ":", "-", "1", "]", ",", "shorthand", ",", "tries", "+", "1", ",", "result", ")", "else", ":", "res", "=", "[", "]", "for", "r", "in", "result", ":", "if", "shorthand", ":", "res", ".", "append", "(", "r", "[", "2", "]", "+", "r", "[", "0", "]", ")", "else", ":", "res", ".", "append", "(", "r", "[", "2", "]", "+", "chord_shorthand_meaning", "[", "r", "[", "0", "]", "]", "+", "int_desc", "(", "r", "[", "1", "]", ")", ")", "return", "res", "return", "inversion_exhauster", "(", "triad", ",", "shorthand", ",", "1", ",", "[", "]", ")" ]
Name the triad; return answers in a list. The third argument should not be given. If shorthand is True the answers will be in abbreviated form. This function can determine major, minor, diminished and suspended triads. Also knows about invertions. Examples: >>> determine_triad(['A', 'C', 'E']) 'A minor triad' >>> determine_triad(['C', 'E', 'A']) 'A minor triad, first inversion' >>> determine_triad(['A', 'C', 'E'], True) 'Am'
[ "Name", "the", "triad", ";", "return", "answers", "in", "a", "list", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L852-L925
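A hedged example for the determine_triad record above, reusing the docstring's inputs. Note that the function returns a list of candidate names even though the docstring examples print bare strings.

from mingus.core import chords

print(chords.determine_triad(['A', 'C', 'E']))        # includes 'A minor triad'
print(chords.determine_triad(['C', 'E', 'A']))        # inversions are spelled out in the names
print(chords.determine_triad(['A', 'C', 'E'], True))  # shorthand form, e.g. 'Am'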
bspaans/python-mingus
mingus/core/chords.py
determine_seventh
def determine_seventh(seventh, shorthand=False, no_inversion=False, no_polychords=False): """Determine the type of seventh chord; return the results in a list, ordered on inversions. This function expects seventh to be a list of 4 notes. If shorthand is set to True, results will be returned in chord shorthand ('Cmin7', etc.); inversions will be dropped in that case. Example: >>> determine_seventh(['C', 'E', 'G', 'B']) ['C major seventh'] """ if len(seventh) != 4: # warning raise exception: seventh chord is not a seventh chord return False def inversion_exhauster(seventh, shorthand, tries, result, polychords): """Determine sevenths recursive functions.""" # Check whether the first three notes of seventh are part of some triad. triads = determine_triad(seventh[:3], True, True) # Get the interval between the first and last note intval3 = intervals.determine(seventh[0], seventh[3]) def add_result(short, poly=False): """Helper function.""" result.append((short, tries, seventh[0], poly)) # Recognizing polychords if tries == 1 and not no_polychords: polychords = polychords + determine_polychords(seventh, shorthand) # Recognizing sevenths for triad in triads: # Basic triads triad = triad[len(seventh[0]):] if triad == 'm': if intval3 == 'minor seventh': add_result('m7') elif intval3 == 'major seventh': add_result('m/M7') elif intval3 == 'major sixth': add_result('m6') elif triad == 'M': if intval3 == 'major seventh': add_result('M7') elif intval3 == 'minor seventh': add_result('7') elif intval3 == 'major sixth': add_result('M6') elif triad == 'dim': if intval3 == 'minor seventh': add_result('m7b5') elif intval3 == 'diminished seventh': add_result('dim7') elif triad == 'aug': if intval3 == 'minor seventh': add_result('m7+') if intval3 == 'major seventh': add_result('M7+') elif triad == 'sus4': if intval3 == 'minor seventh': add_result('sus47') elif intval3 == 'minor second': add_result('sus4b9') elif triad == 'm7': # Other if intval3 == 'perfect fourth': add_result('11') elif triad == '7b5': if intval3 == 'minor seventh': add_result('7b5') if tries != 4 and not no_inversion: return inversion_exhauster([seventh[-1]] + seventh[:-1], shorthand, tries + 1, result, polychords) else: # Return results res = [] # Reset seventh seventh = [seventh[3]] + seventh[0:3] for x in result: if shorthand: res.append(x[2] + x[0]) else: res.append(x[2] + chord_shorthand_meaning[x[0]] + int_desc(x[1])) return res + polychords return inversion_exhauster(seventh, shorthand, 1, [], [])
python
def determine_seventh(seventh, shorthand=False, no_inversion=False, no_polychords=False): if len(seventh) != 4: return False def inversion_exhauster(seventh, shorthand, tries, result, polychords): triads = determine_triad(seventh[:3], True, True) intval3 = intervals.determine(seventh[0], seventh[3]) def add_result(short, poly=False): result.append((short, tries, seventh[0], poly)) if tries == 1 and not no_polychords: polychords = polychords + determine_polychords(seventh, shorthand) for triad in triads: triad = triad[len(seventh[0]):] if triad == 'm': if intval3 == 'minor seventh': add_result('m7') elif intval3 == 'major seventh': add_result('m/M7') elif intval3 == 'major sixth': add_result('m6') elif triad == 'M': if intval3 == 'major seventh': add_result('M7') elif intval3 == 'minor seventh': add_result('7') elif intval3 == 'major sixth': add_result('M6') elif triad == 'dim': if intval3 == 'minor seventh': add_result('m7b5') elif intval3 == 'diminished seventh': add_result('dim7') elif triad == 'aug': if intval3 == 'minor seventh': add_result('m7+') if intval3 == 'major seventh': add_result('M7+') elif triad == 'sus4': if intval3 == 'minor seventh': add_result('sus47') elif intval3 == 'minor second': add_result('sus4b9') elif triad == 'm7': if intval3 == 'perfect fourth': add_result('11') elif triad == '7b5': if intval3 == 'minor seventh': add_result('7b5') if tries != 4 and not no_inversion: return inversion_exhauster([seventh[-1]] + seventh[:-1], shorthand, tries + 1, result, polychords) else: res = [] seventh = [seventh[3]] + seventh[0:3] for x in result: if shorthand: res.append(x[2] + x[0]) else: res.append(x[2] + chord_shorthand_meaning[x[0]] + int_desc(x[1])) return res + polychords return inversion_exhauster(seventh, shorthand, 1, [], [])
[ "def", "determine_seventh", "(", "seventh", ",", "shorthand", "=", "False", ",", "no_inversion", "=", "False", ",", "no_polychords", "=", "False", ")", ":", "if", "len", "(", "seventh", ")", "!=", "4", ":", "# warning raise exception: seventh chord is not a seventh chord", "return", "False", "def", "inversion_exhauster", "(", "seventh", ",", "shorthand", ",", "tries", ",", "result", ",", "polychords", ")", ":", "\"\"\"Determine sevenths recursive functions.\"\"\"", "# Check whether the first three notes of seventh are part of some triad.", "triads", "=", "determine_triad", "(", "seventh", "[", ":", "3", "]", ",", "True", ",", "True", ")", "# Get the interval between the first and last note", "intval3", "=", "intervals", ".", "determine", "(", "seventh", "[", "0", "]", ",", "seventh", "[", "3", "]", ")", "def", "add_result", "(", "short", ",", "poly", "=", "False", ")", ":", "\"\"\"Helper function.\"\"\"", "result", ".", "append", "(", "(", "short", ",", "tries", ",", "seventh", "[", "0", "]", ",", "poly", ")", ")", "# Recognizing polychords", "if", "tries", "==", "1", "and", "not", "no_polychords", ":", "polychords", "=", "polychords", "+", "determine_polychords", "(", "seventh", ",", "shorthand", ")", "# Recognizing sevenths", "for", "triad", "in", "triads", ":", "# Basic triads", "triad", "=", "triad", "[", "len", "(", "seventh", "[", "0", "]", ")", ":", "]", "if", "triad", "==", "'m'", ":", "if", "intval3", "==", "'minor seventh'", ":", "add_result", "(", "'m7'", ")", "elif", "intval3", "==", "'major seventh'", ":", "add_result", "(", "'m/M7'", ")", "elif", "intval3", "==", "'major sixth'", ":", "add_result", "(", "'m6'", ")", "elif", "triad", "==", "'M'", ":", "if", "intval3", "==", "'major seventh'", ":", "add_result", "(", "'M7'", ")", "elif", "intval3", "==", "'minor seventh'", ":", "add_result", "(", "'7'", ")", "elif", "intval3", "==", "'major sixth'", ":", "add_result", "(", "'M6'", ")", "elif", "triad", "==", "'dim'", ":", "if", "intval3", "==", "'minor seventh'", ":", "add_result", "(", "'m7b5'", ")", "elif", "intval3", "==", "'diminished seventh'", ":", "add_result", "(", "'dim7'", ")", "elif", "triad", "==", "'aug'", ":", "if", "intval3", "==", "'minor seventh'", ":", "add_result", "(", "'m7+'", ")", "if", "intval3", "==", "'major seventh'", ":", "add_result", "(", "'M7+'", ")", "elif", "triad", "==", "'sus4'", ":", "if", "intval3", "==", "'minor seventh'", ":", "add_result", "(", "'sus47'", ")", "elif", "intval3", "==", "'minor second'", ":", "add_result", "(", "'sus4b9'", ")", "elif", "triad", "==", "'m7'", ":", "# Other", "if", "intval3", "==", "'perfect fourth'", ":", "add_result", "(", "'11'", ")", "elif", "triad", "==", "'7b5'", ":", "if", "intval3", "==", "'minor seventh'", ":", "add_result", "(", "'7b5'", ")", "if", "tries", "!=", "4", "and", "not", "no_inversion", ":", "return", "inversion_exhauster", "(", "[", "seventh", "[", "-", "1", "]", "]", "+", "seventh", "[", ":", "-", "1", "]", ",", "shorthand", ",", "tries", "+", "1", ",", "result", ",", "polychords", ")", "else", ":", "# Return results", "res", "=", "[", "]", "# Reset seventh", "seventh", "=", "[", "seventh", "[", "3", "]", "]", "+", "seventh", "[", "0", ":", "3", "]", "for", "x", "in", "result", ":", "if", "shorthand", ":", "res", ".", "append", "(", "x", "[", "2", "]", "+", "x", "[", "0", "]", ")", "else", ":", "res", ".", "append", "(", "x", "[", "2", "]", "+", "chord_shorthand_meaning", "[", "x", "[", "0", "]", "]", "+", "int_desc", "(", "x", "[", "1", "]", ")", ")", "return", "res", "+", 
"polychords", "return", "inversion_exhauster", "(", "seventh", ",", "shorthand", ",", "1", ",", "[", "]", ",", "[", "]", ")" ]
Determine the type of seventh chord; return the results in a list, ordered on inversions. This function expects seventh to be a list of 4 notes. If shorthand is set to True, results will be returned in chord shorthand ('Cmin7', etc.); inversions will be dropped in that case. Example: >>> determine_seventh(['C', 'E', 'G', 'B']) ['C major seventh']
[ "Determine", "the", "type", "of", "seventh", "chord", ";", "return", "the", "results", "in", "a", "list", "ordered", "on", "inversions", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L927-L1019
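A short sketch for the determine_seventh record above; the long-form output is the docstring's own example, and the shorthand form follows from the code path that concatenates the root with the abbreviation.

from mingus.core import chords

print(chords.determine_seventh(['C', 'E', 'G', 'B']))        # ['C major seventh'] per the docstring
print(chords.determine_seventh(['C', 'E', 'G', 'B'], True))  # shorthand, expected to include 'CM7'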
bspaans/python-mingus
mingus/core/chords.py
determine_extended_chord5
def determine_extended_chord5(chord, shorthand=False, no_inversions=False, no_polychords=False): """Determine the names of an extended chord.""" if len(chord) != 5: # warning raise exeption: not an extended chord return False def inversion_exhauster(chord, shorthand, tries, result, polychords): """Recursive helper function.""" def add_result(short): result.append((short, tries, chord[0])) triads = determine_triad(chord[:3], True, True) sevenths = determine_seventh(chord[:4], True, True, True) # Determine polychords if tries == 1 and not no_polychords: polychords += determine_polychords(chord, shorthand) intval4 = intervals.determine(chord[0], chord[4]) for seventh in sevenths: seventh = seventh[len(chord[0]):] if seventh == 'M7': if intval4 == 'major second': add_result('M9') elif seventh == 'm7': if intval4 == 'major second': add_result('m9') elif intval4 == 'perfect fourth': add_result('m11') elif seventh == '7': if intval4 == 'major second': add_result('9') elif intval4 == 'minor second': add_result('7b9') elif intval4 == 'augmented second': add_result('7#9') elif intval4 == 'minor third': add_result('7b12') elif intval4 == 'augmented fourth': add_result('7#11') elif intval4 == 'major sixth': add_result('13') elif seventh == 'M6': if intval4 == 'major second': add_result('6/9') elif intval4 == 'minor seventh': add_result('6/7') if tries != 5 and not no_inversions: return inversion_exhauster([chord[-1]] + chord[:-1], shorthand, tries + 1, result, polychords) else: res = [] for r in result: if shorthand: res.append(r[2] + r[0]) else: res.append(r[2] + chord_shorthand_meaning[r[0]] + int_desc(r[1])) return res + polychords return inversion_exhauster(chord, shorthand, 1, [], [])
python
def determine_extended_chord5(chord, shorthand=False, no_inversions=False, no_polychords=False): if len(chord) != 5: return False def inversion_exhauster(chord, shorthand, tries, result, polychords): def add_result(short): result.append((short, tries, chord[0])) triads = determine_triad(chord[:3], True, True) sevenths = determine_seventh(chord[:4], True, True, True) if tries == 1 and not no_polychords: polychords += determine_polychords(chord, shorthand) intval4 = intervals.determine(chord[0], chord[4]) for seventh in sevenths: seventh = seventh[len(chord[0]):] if seventh == 'M7': if intval4 == 'major second': add_result('M9') elif seventh == 'm7': if intval4 == 'major second': add_result('m9') elif intval4 == 'perfect fourth': add_result('m11') elif seventh == '7': if intval4 == 'major second': add_result('9') elif intval4 == 'minor second': add_result('7b9') elif intval4 == 'augmented second': add_result('7#9') elif intval4 == 'minor third': add_result('7b12') elif intval4 == 'augmented fourth': add_result('7#11') elif intval4 == 'major sixth': add_result('13') elif seventh == 'M6': if intval4 == 'major second': add_result('6/9') elif intval4 == 'minor seventh': add_result('6/7') if tries != 5 and not no_inversions: return inversion_exhauster([chord[-1]] + chord[:-1], shorthand, tries + 1, result, polychords) else: res = [] for r in result: if shorthand: res.append(r[2] + r[0]) else: res.append(r[2] + chord_shorthand_meaning[r[0]] + int_desc(r[1])) return res + polychords return inversion_exhauster(chord, shorthand, 1, [], [])
[ "def", "determine_extended_chord5", "(", "chord", ",", "shorthand", "=", "False", ",", "no_inversions", "=", "False", ",", "no_polychords", "=", "False", ")", ":", "if", "len", "(", "chord", ")", "!=", "5", ":", "# warning raise exeption: not an extended chord", "return", "False", "def", "inversion_exhauster", "(", "chord", ",", "shorthand", ",", "tries", ",", "result", ",", "polychords", ")", ":", "\"\"\"Recursive helper function.\"\"\"", "def", "add_result", "(", "short", ")", ":", "result", ".", "append", "(", "(", "short", ",", "tries", ",", "chord", "[", "0", "]", ")", ")", "triads", "=", "determine_triad", "(", "chord", "[", ":", "3", "]", ",", "True", ",", "True", ")", "sevenths", "=", "determine_seventh", "(", "chord", "[", ":", "4", "]", ",", "True", ",", "True", ",", "True", ")", "# Determine polychords", "if", "tries", "==", "1", "and", "not", "no_polychords", ":", "polychords", "+=", "determine_polychords", "(", "chord", ",", "shorthand", ")", "intval4", "=", "intervals", ".", "determine", "(", "chord", "[", "0", "]", ",", "chord", "[", "4", "]", ")", "for", "seventh", "in", "sevenths", ":", "seventh", "=", "seventh", "[", "len", "(", "chord", "[", "0", "]", ")", ":", "]", "if", "seventh", "==", "'M7'", ":", "if", "intval4", "==", "'major second'", ":", "add_result", "(", "'M9'", ")", "elif", "seventh", "==", "'m7'", ":", "if", "intval4", "==", "'major second'", ":", "add_result", "(", "'m9'", ")", "elif", "intval4", "==", "'perfect fourth'", ":", "add_result", "(", "'m11'", ")", "elif", "seventh", "==", "'7'", ":", "if", "intval4", "==", "'major second'", ":", "add_result", "(", "'9'", ")", "elif", "intval4", "==", "'minor second'", ":", "add_result", "(", "'7b9'", ")", "elif", "intval4", "==", "'augmented second'", ":", "add_result", "(", "'7#9'", ")", "elif", "intval4", "==", "'minor third'", ":", "add_result", "(", "'7b12'", ")", "elif", "intval4", "==", "'augmented fourth'", ":", "add_result", "(", "'7#11'", ")", "elif", "intval4", "==", "'major sixth'", ":", "add_result", "(", "'13'", ")", "elif", "seventh", "==", "'M6'", ":", "if", "intval4", "==", "'major second'", ":", "add_result", "(", "'6/9'", ")", "elif", "intval4", "==", "'minor seventh'", ":", "add_result", "(", "'6/7'", ")", "if", "tries", "!=", "5", "and", "not", "no_inversions", ":", "return", "inversion_exhauster", "(", "[", "chord", "[", "-", "1", "]", "]", "+", "chord", "[", ":", "-", "1", "]", ",", "shorthand", ",", "tries", "+", "1", ",", "result", ",", "polychords", ")", "else", ":", "res", "=", "[", "]", "for", "r", "in", "result", ":", "if", "shorthand", ":", "res", ".", "append", "(", "r", "[", "2", "]", "+", "r", "[", "0", "]", ")", "else", ":", "res", ".", "append", "(", "r", "[", "2", "]", "+", "chord_shorthand_meaning", "[", "r", "[", "0", "]", "]", "+", "int_desc", "(", "r", "[", "1", "]", ")", ")", "return", "res", "+", "polychords", "return", "inversion_exhauster", "(", "chord", ",", "shorthand", ",", "1", ",", "[", "]", ",", "[", "]", ")" ]
Determine the names of an extended chord.
[ "Determine", "the", "names", "of", "an", "extended", "chord", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L1021-L1082
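A hedged sketch for the determine_extended_chord5 record above. The input spells a dominant ninth (C E G Bb D); with shorthand, no_inversions and no_polychords all set to True, tracing the code above suggests the single answer 'C9', but treat that as an expectation rather than verified output.

from mingus.core import chords

ninth = ['C', 'E', 'G', 'Bb', 'D']
# Abbreviated answers only, no inversions or polychords.
print(chords.determine_extended_chord5(ninth, True, True, True))  # expected: ['C9']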
bspaans/python-mingus
mingus/core/chords.py
determine_extended_chord6
def determine_extended_chord6(chord, shorthand=False, no_inversions=False, no_polychords=False): """Determine the names of an 6 note chord.""" if len(chord) != 6: # warning raise exeption: not an extended chord return False def inversion_exhauster( chord, shorthand, tries, result, polychords, ): """Recursive helper function""" # Determine polychords if tries == 1 and not no_polychords: polychords += determine_polychords(chord, shorthand) def add_result(short): result.append((short, tries, chord[0])) ch = determine_extended_chord5(chord[:5], True, True, True) intval5 = intervals.determine(chord[0], chord[5]) for c in ch: c = c[len(chord[0]):] if c == '9': if intval5 == 'perfect fourth': add_result('11') elif intval5 == 'augmented fourth': add_result('7#11') elif intval5 == 'major sixth': add_result('13') elif c == 'm9': if intval5 == 'perfect fourth': add_result('m11') elif intval5 == 'major sixth': add_result('m13') elif c == 'M9': if intval5 == 'perfect fourth': add_result('M11') elif intval5 == 'major sixth': add_result('M13') if tries != 6 and not no_inversions: return inversion_exhauster([chord[-1]] + chord[:-1], shorthand, tries + 1, result, polychords) else: res = [] for r in result: if shorthand: res.append(r[2] + r[0]) else: res.append(r[2] + chord_shorthand_meaning[r[0]] + int_desc(r[1])) return res + polychords return inversion_exhauster(chord, shorthand, 1, [], [])
python
def determine_extended_chord6(chord, shorthand=False, no_inversions=False, no_polychords=False): if len(chord) != 6: return False def inversion_exhauster( chord, shorthand, tries, result, polychords, ): if tries == 1 and not no_polychords: polychords += determine_polychords(chord, shorthand) def add_result(short): result.append((short, tries, chord[0])) ch = determine_extended_chord5(chord[:5], True, True, True) intval5 = intervals.determine(chord[0], chord[5]) for c in ch: c = c[len(chord[0]):] if c == '9': if intval5 == 'perfect fourth': add_result('11') elif intval5 == 'augmented fourth': add_result('7#11') elif intval5 == 'major sixth': add_result('13') elif c == 'm9': if intval5 == 'perfect fourth': add_result('m11') elif intval5 == 'major sixth': add_result('m13') elif c == 'M9': if intval5 == 'perfect fourth': add_result('M11') elif intval5 == 'major sixth': add_result('M13') if tries != 6 and not no_inversions: return inversion_exhauster([chord[-1]] + chord[:-1], shorthand, tries + 1, result, polychords) else: res = [] for r in result: if shorthand: res.append(r[2] + r[0]) else: res.append(r[2] + chord_shorthand_meaning[r[0]] + int_desc(r[1])) return res + polychords return inversion_exhauster(chord, shorthand, 1, [], [])
[ "def", "determine_extended_chord6", "(", "chord", ",", "shorthand", "=", "False", ",", "no_inversions", "=", "False", ",", "no_polychords", "=", "False", ")", ":", "if", "len", "(", "chord", ")", "!=", "6", ":", "# warning raise exeption: not an extended chord", "return", "False", "def", "inversion_exhauster", "(", "chord", ",", "shorthand", ",", "tries", ",", "result", ",", "polychords", ",", ")", ":", "\"\"\"Recursive helper function\"\"\"", "# Determine polychords", "if", "tries", "==", "1", "and", "not", "no_polychords", ":", "polychords", "+=", "determine_polychords", "(", "chord", ",", "shorthand", ")", "def", "add_result", "(", "short", ")", ":", "result", ".", "append", "(", "(", "short", ",", "tries", ",", "chord", "[", "0", "]", ")", ")", "ch", "=", "determine_extended_chord5", "(", "chord", "[", ":", "5", "]", ",", "True", ",", "True", ",", "True", ")", "intval5", "=", "intervals", ".", "determine", "(", "chord", "[", "0", "]", ",", "chord", "[", "5", "]", ")", "for", "c", "in", "ch", ":", "c", "=", "c", "[", "len", "(", "chord", "[", "0", "]", ")", ":", "]", "if", "c", "==", "'9'", ":", "if", "intval5", "==", "'perfect fourth'", ":", "add_result", "(", "'11'", ")", "elif", "intval5", "==", "'augmented fourth'", ":", "add_result", "(", "'7#11'", ")", "elif", "intval5", "==", "'major sixth'", ":", "add_result", "(", "'13'", ")", "elif", "c", "==", "'m9'", ":", "if", "intval5", "==", "'perfect fourth'", ":", "add_result", "(", "'m11'", ")", "elif", "intval5", "==", "'major sixth'", ":", "add_result", "(", "'m13'", ")", "elif", "c", "==", "'M9'", ":", "if", "intval5", "==", "'perfect fourth'", ":", "add_result", "(", "'M11'", ")", "elif", "intval5", "==", "'major sixth'", ":", "add_result", "(", "'M13'", ")", "if", "tries", "!=", "6", "and", "not", "no_inversions", ":", "return", "inversion_exhauster", "(", "[", "chord", "[", "-", "1", "]", "]", "+", "chord", "[", ":", "-", "1", "]", ",", "shorthand", ",", "tries", "+", "1", ",", "result", ",", "polychords", ")", "else", ":", "res", "=", "[", "]", "for", "r", "in", "result", ":", "if", "shorthand", ":", "res", ".", "append", "(", "r", "[", "2", "]", "+", "r", "[", "0", "]", ")", "else", ":", "res", ".", "append", "(", "r", "[", "2", "]", "+", "chord_shorthand_meaning", "[", "r", "[", "0", "]", "]", "+", "int_desc", "(", "r", "[", "1", "]", ")", ")", "return", "res", "+", "polychords", "return", "inversion_exhauster", "(", "chord", ",", "shorthand", ",", "1", ",", "[", "]", ",", "[", "]", ")" ]
Determine the names of an 6 note chord.
[ "Determine", "the", "names", "of", "an", "6", "note", "chord", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L1084-L1140
bspaans/python-mingus
mingus/core/chords.py
determine_polychords
def determine_polychords(chord, shorthand=False): """Determine the polychords in chord. This function can handle anything from polychords based on two triads to 6 note extended chords. """ polychords = [] function_list = [determine_triad, determine_seventh, determine_extended_chord5, determine_extended_chord6, determine_extended_chord7] # Range tracking. if len(chord) <= 3: return [] elif len(chord) > 14: return [] elif len(chord) - 3 <= 5: function_nr = range(0, len(chord) - 3) else: function_nr = range(0, 5) for f in function_nr: for f2 in function_nr: # The clever part: Try the function_list[f] on the len(chord) - (3 + # f) last notes of the chord. Then try the function_list[f2] on the # f2 + 3 first notes of the chord. Thus, trying all possible # combinations. for chord1 in function_list[f](chord[len(chord) - (3 + f):], True, True, True): for chord2 in function_list[f2](chord[:f2 + 3], True, True, True): polychords.append('%s|%s' % (chord1, chord2)) if shorthand: for p in polychords: p = p + ' polychord' return polychords
python
def determine_polychords(chord, shorthand=False): polychords = [] function_list = [determine_triad, determine_seventh, determine_extended_chord5, determine_extended_chord6, determine_extended_chord7] if len(chord) <= 3: return [] elif len(chord) > 14: return [] elif len(chord) - 3 <= 5: function_nr = range(0, len(chord) - 3) else: function_nr = range(0, 5) for f in function_nr: for f2 in function_nr: for chord1 in function_list[f](chord[len(chord) - (3 + f):], True, True, True): for chord2 in function_list[f2](chord[:f2 + 3], True, True, True): polychords.append('%s|%s' % (chord1, chord2)) if shorthand: for p in polychords: p = p + ' polychord' return polychords
[ "def", "determine_polychords", "(", "chord", ",", "shorthand", "=", "False", ")", ":", "polychords", "=", "[", "]", "function_list", "=", "[", "determine_triad", ",", "determine_seventh", ",", "determine_extended_chord5", ",", "determine_extended_chord6", ",", "determine_extended_chord7", "]", "# Range tracking.", "if", "len", "(", "chord", ")", "<=", "3", ":", "return", "[", "]", "elif", "len", "(", "chord", ")", ">", "14", ":", "return", "[", "]", "elif", "len", "(", "chord", ")", "-", "3", "<=", "5", ":", "function_nr", "=", "range", "(", "0", ",", "len", "(", "chord", ")", "-", "3", ")", "else", ":", "function_nr", "=", "range", "(", "0", ",", "5", ")", "for", "f", "in", "function_nr", ":", "for", "f2", "in", "function_nr", ":", "# The clever part: Try the function_list[f] on the len(chord) - (3 +", "# f) last notes of the chord. Then try the function_list[f2] on the", "# f2 + 3 first notes of the chord. Thus, trying all possible", "# combinations.", "for", "chord1", "in", "function_list", "[", "f", "]", "(", "chord", "[", "len", "(", "chord", ")", "-", "(", "3", "+", "f", ")", ":", "]", ",", "True", ",", "True", ",", "True", ")", ":", "for", "chord2", "in", "function_list", "[", "f2", "]", "(", "chord", "[", ":", "f2", "+", "3", "]", ",", "True", ",", "True", ",", "True", ")", ":", "polychords", ".", "append", "(", "'%s|%s'", "%", "(", "chord1", ",", "chord2", ")", ")", "if", "shorthand", ":", "for", "p", "in", "polychords", ":", "p", "=", "p", "+", "' polychord'", "return", "polychords" ]
Determine the polychords in chord. This function can handle anything from polychords based on two triads to 6 note extended chords.
[ "Determine", "the", "polychords", "in", "chord", ".", "This", "function", "can", "handle", "anything", "from", "polychords", "based", "on", "two", "triads", "to", "6", "note", "extended", "chords", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/chords.py#L1197-L1231
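A hedged sketch for the determine_polychords record above. The five notes below stack a D minor triad on a G major triad; following the slicing logic in the code, the returned names should include something like 'Dm|GM', though the exact spelling is not verified here.

from mingus.core import chords

print(chords.determine_polychords(['G', 'B', 'D', 'F', 'A']))  # expected to include 'Dm|GM'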
bspaans/python-mingus
mingus/extra/lilypond.py
from_Note
def from_Note(note, process_octaves=True, standalone=True): """Get a Note object and return the LilyPond equivalent in a string. If process_octaves is set to False, all data regarding octaves will be ignored. If standalone is True, the result can be used by functions like to_png and will produce a valid output. The argument is mostly here to let from_NoteContainer make use of this function. """ # Throw exception if not hasattr(note, 'name'): return False # Lower the case of the name result = note.name[0].lower() # Convert #'s and b's to 'is' and 'es' suffixes for accidental in note.name[1:]: if accidental == '#': result += 'is' elif accidental == 'b': result += 'es' # Place ' and , for octaves if process_octaves: oct = note.octave if oct >= 4: while oct > 3: result += "'" oct -= 1 elif oct < 3: while oct < 3: result += ',' oct += 1 if standalone: return '{ %s }' % result else: return result
python
def from_Note(note, process_octaves=True, standalone=True): if not hasattr(note, 'name'): return False result = note.name[0].lower() for accidental in note.name[1:]: if accidental == '#': result += 'is' elif accidental == 'b': result += 'es' if process_octaves: oct = note.octave if oct >= 4: while oct > 3: result += "'" oct -= 1 elif oct < 3: while oct < 3: result += ',' oct += 1 if standalone: return '{ %s }' % result else: return result
[ "def", "from_Note", "(", "note", ",", "process_octaves", "=", "True", ",", "standalone", "=", "True", ")", ":", "# Throw exception", "if", "not", "hasattr", "(", "note", ",", "'name'", ")", ":", "return", "False", "# Lower the case of the name", "result", "=", "note", ".", "name", "[", "0", "]", ".", "lower", "(", ")", "# Convert #'s and b's to 'is' and 'es' suffixes", "for", "accidental", "in", "note", ".", "name", "[", "1", ":", "]", ":", "if", "accidental", "==", "'#'", ":", "result", "+=", "'is'", "elif", "accidental", "==", "'b'", ":", "result", "+=", "'es'", "# Place ' and , for octaves", "if", "process_octaves", ":", "oct", "=", "note", ".", "octave", "if", "oct", ">=", "4", ":", "while", "oct", ">", "3", ":", "result", "+=", "\"'\"", "oct", "-=", "1", "elif", "oct", "<", "3", ":", "while", "oct", "<", "3", ":", "result", "+=", "','", "oct", "+=", "1", "if", "standalone", ":", "return", "'{ %s }'", "%", "result", "else", ":", "return", "result" ]
Get a Note object and return the LilyPond equivalent in a string. If process_octaves is set to False, all data regarding octaves will be ignored. If standalone is True, the result can be used by functions like to_png and will produce a valid output. The argument is mostly here to let from_NoteContainer make use of this function.
[ "Get", "a", "Note", "object", "and", "return", "the", "LilyPond", "equivalent", "in", "a", "string", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/lilypond.py#L34-L70
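A small sketch for the from_Note record above. It assumes Note is importable from mingus.containers (as elsewhere in this repository); the outputs are read directly off the accidental and octave rules in the code.

from mingus.containers import Note
from mingus.extra import lilypond

print(lilypond.from_Note(Note('C#', 5)))                    # "{ cis'' }": '#' becomes 'is', octave 5 adds two quotes
print(lilypond.from_Note(Note('Eb', 3), standalone=False))  # "ees": 'b' becomes 'es', octave 3 adds no marks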
bspaans/python-mingus
mingus/extra/lilypond.py
from_NoteContainer
def from_NoteContainer(nc, duration=None, standalone=True): """Get a NoteContainer object and return the LilyPond equivalent in a string. The second argument determining the duration of the NoteContainer is optional. When the standalone argument is True the result of this function can be used directly by functions like to_png. It is mostly here to be used by from_Bar. """ # Throw exception if nc is not None and not hasattr(nc, 'notes'): return False # Return rests for None or empty lists if nc is None or len(nc.notes) == 0: result = 'r' elif len(nc.notes) == 1: # Return a single note if the list contains only one note result = from_Note(nc.notes[0], standalone=False) else: # Return the notes grouped in '<' and '>' result = '<' for notes in nc.notes: result += from_Note(notes, standalone=False) + ' ' result = result[:-1] + '>' # Add the duration if duration != None: parsed_value = value.determine(duration) # Special case: check for longa and breve in the duration (issue #37) dur = parsed_value[0] if dur == value.longa: result += '\\longa' elif dur == value.breve: result += '\\breve' else: result += str(int(parsed_value[0])) for i in range(parsed_value[1]): result += '.' if not standalone: return result else: return '{ %s }' % result
python
def from_NoteContainer(nc, duration=None, standalone=True): if nc is not None and not hasattr(nc, 'notes'): return False if nc is None or len(nc.notes) == 0: result = 'r' elif len(nc.notes) == 1: result = from_Note(nc.notes[0], standalone=False) else: result = '<' for notes in nc.notes: result += from_Note(notes, standalone=False) + ' ' result = result[:-1] + '>' if duration != None: parsed_value = value.determine(duration) dur = parsed_value[0] if dur == value.longa: result += '\\longa' elif dur == value.breve: result += '\\breve' else: result += str(int(parsed_value[0])) for i in range(parsed_value[1]): result += '.' if not standalone: return result else: return '{ %s }' % result
[ "def", "from_NoteContainer", "(", "nc", ",", "duration", "=", "None", ",", "standalone", "=", "True", ")", ":", "# Throw exception", "if", "nc", "is", "not", "None", "and", "not", "hasattr", "(", "nc", ",", "'notes'", ")", ":", "return", "False", "# Return rests for None or empty lists", "if", "nc", "is", "None", "or", "len", "(", "nc", ".", "notes", ")", "==", "0", ":", "result", "=", "'r'", "elif", "len", "(", "nc", ".", "notes", ")", "==", "1", ":", "# Return a single note if the list contains only one note", "result", "=", "from_Note", "(", "nc", ".", "notes", "[", "0", "]", ",", "standalone", "=", "False", ")", "else", ":", "# Return the notes grouped in '<' and '>'", "result", "=", "'<'", "for", "notes", "in", "nc", ".", "notes", ":", "result", "+=", "from_Note", "(", "notes", ",", "standalone", "=", "False", ")", "+", "' '", "result", "=", "result", "[", ":", "-", "1", "]", "+", "'>'", "# Add the duration", "if", "duration", "!=", "None", ":", "parsed_value", "=", "value", ".", "determine", "(", "duration", ")", "# Special case: check for longa and breve in the duration (issue #37)", "dur", "=", "parsed_value", "[", "0", "]", "if", "dur", "==", "value", ".", "longa", ":", "result", "+=", "'\\\\longa'", "elif", "dur", "==", "value", ".", "breve", ":", "result", "+=", "'\\\\breve'", "else", ":", "result", "+=", "str", "(", "int", "(", "parsed_value", "[", "0", "]", ")", ")", "for", "i", "in", "range", "(", "parsed_value", "[", "1", "]", ")", ":", "result", "+=", "'.'", "if", "not", "standalone", ":", "return", "result", "else", ":", "return", "'{ %s }'", "%", "result" ]
Get a NoteContainer object and return the LilyPond equivalent in a string. The second argument determining the duration of the NoteContainer is optional. When the standalone argument is True the result of this function can be used directly by functions like to_png. It is mostly here to be used by from_Bar.
[ "Get", "a", "NoteContainer", "object", "and", "return", "the", "LilyPond", "equivalent", "in", "a", "string", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/lilypond.py#L72-L116
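A hedged sketch for the from_NoteContainer record above, assuming NoteContainer accepts a list of note names and defaults each note to octave 4 (hence the single quote per note).

from mingus.containers import NoteContainer
from mingus.extra import lilypond

nc = NoteContainer(['C', 'E', 'G'])
print(lilypond.from_NoteContainer(nc, 4))  # expected: "{ <c' e' g'>4 }"
print(lilypond.from_NoteContainer(None))   # rests render as "{ r }"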
bspaans/python-mingus
mingus/extra/lilypond.py
from_Bar
def from_Bar(bar, showkey=True, showtime=True): """Get a Bar object and return the LilyPond equivalent in a string. The showkey and showtime parameters can be set to determine whether the key and the time should be shown. """ # Throw exception if not hasattr(bar, 'bar'): return False # Process the key if showkey: key_note = Note(bar.key.key[0].upper() + bar.key.key[1:]) key = '\\key %s \\%s ' % (from_Note(key_note, False, standalone=False), bar.key.mode) result = key else: result = '' # Handle the NoteContainers latest_ratio = (1, 1) ratio_has_changed = False for bar_entry in bar.bar: parsed_value = value.determine(bar_entry[1]) ratio = parsed_value[2:] if ratio == latest_ratio: result += from_NoteContainer(bar_entry[2], bar_entry[1], standalone=False) + ' ' else: if ratio_has_changed: result += '}' result += '\\times %d/%d {' % (ratio[1], ratio[0]) result += from_NoteContainer(bar_entry[2], bar_entry[1], standalone=False) + ' ' latest_ratio = ratio ratio_has_changed = True if ratio_has_changed: result += '}' # Process the time if showtime: return '{ \\time %d/%d %s}' % (bar.meter[0], bar.meter[1], result) else: return '{ %s}' % result
python
def from_Bar(bar, showkey=True, showtime=True): if not hasattr(bar, 'bar'): return False if showkey: key_note = Note(bar.key.key[0].upper() + bar.key.key[1:]) key = '\\key %s \\%s ' % (from_Note(key_note, False, standalone=False), bar.key.mode) result = key else: result = '' latest_ratio = (1, 1) ratio_has_changed = False for bar_entry in bar.bar: parsed_value = value.determine(bar_entry[1]) ratio = parsed_value[2:] if ratio == latest_ratio: result += from_NoteContainer(bar_entry[2], bar_entry[1], standalone=False) + ' ' else: if ratio_has_changed: result += '}' result += '\\times %d/%d {' % (ratio[1], ratio[0]) result += from_NoteContainer(bar_entry[2], bar_entry[1], standalone=False) + ' ' latest_ratio = ratio ratio_has_changed = True if ratio_has_changed: result += '}' if showtime: return '{ \\time %d/%d %s}' % (bar.meter[0], bar.meter[1], result) else: return '{ %s}' % result
[ "def", "from_Bar", "(", "bar", ",", "showkey", "=", "True", ",", "showtime", "=", "True", ")", ":", "# Throw exception", "if", "not", "hasattr", "(", "bar", ",", "'bar'", ")", ":", "return", "False", "# Process the key", "if", "showkey", ":", "key_note", "=", "Note", "(", "bar", ".", "key", ".", "key", "[", "0", "]", ".", "upper", "(", ")", "+", "bar", ".", "key", ".", "key", "[", "1", ":", "]", ")", "key", "=", "'\\\\key %s \\\\%s '", "%", "(", "from_Note", "(", "key_note", ",", "False", ",", "standalone", "=", "False", ")", ",", "bar", ".", "key", ".", "mode", ")", "result", "=", "key", "else", ":", "result", "=", "''", "# Handle the NoteContainers", "latest_ratio", "=", "(", "1", ",", "1", ")", "ratio_has_changed", "=", "False", "for", "bar_entry", "in", "bar", ".", "bar", ":", "parsed_value", "=", "value", ".", "determine", "(", "bar_entry", "[", "1", "]", ")", "ratio", "=", "parsed_value", "[", "2", ":", "]", "if", "ratio", "==", "latest_ratio", ":", "result", "+=", "from_NoteContainer", "(", "bar_entry", "[", "2", "]", ",", "bar_entry", "[", "1", "]", ",", "standalone", "=", "False", ")", "+", "' '", "else", ":", "if", "ratio_has_changed", ":", "result", "+=", "'}'", "result", "+=", "'\\\\times %d/%d {'", "%", "(", "ratio", "[", "1", "]", ",", "ratio", "[", "0", "]", ")", "result", "+=", "from_NoteContainer", "(", "bar_entry", "[", "2", "]", ",", "bar_entry", "[", "1", "]", ",", "standalone", "=", "False", ")", "+", "' '", "latest_ratio", "=", "ratio", "ratio_has_changed", "=", "True", "if", "ratio_has_changed", ":", "result", "+=", "'}'", "# Process the time", "if", "showtime", ":", "return", "'{ \\\\time %d/%d %s}'", "%", "(", "bar", ".", "meter", "[", "0", "]", ",", "bar", ".", "meter", "[", "1", "]", ",", "result", ")", "else", ":", "return", "'{ %s}'", "%", "result" ]
Get a Bar object and return the LilyPond equivalent in a string. The showkey and showtime parameters can be set to determine whether the key and the time should be shown.
[ "Get", "a", "Bar", "object", "and", "return", "the", "LilyPond", "equivalent", "in", "a", "string", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/lilypond.py#L118-L160
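A minimal sketch for the from_Bar record above, assuming the containers API (a Bar() defaulting to C major in 4/4, and place_notes(note, duration)). The \key and \time directives come straight from the code.

from mingus.containers import Bar
from mingus.extra import lilypond

bar = Bar()              # assumed defaults: key of C, 4/4 meter
bar.place_notes('C', 4)  # quarter note
bar.place_notes('E', 4)
print(lilypond.from_Bar(bar))  # expected to look like "{ \time 4/4 \key c \major c'4 e'4 }"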
bspaans/python-mingus
mingus/extra/lilypond.py
from_Track
def from_Track(track): """Process a Track object and return the LilyPond equivalent in a string.""" # Throw exception if not hasattr(track, 'bars'): return False lastkey = Key('C') lasttime = (4, 4) # Handle the Bars: result = '' for bar in track.bars: if lastkey != bar.key: showkey = True else: showkey = False if lasttime != bar.meter: showtime = True else: showtime = False result += from_Bar(bar, showkey, showtime) + ' ' lastkey = bar.key lasttime = bar.meter return '{ %s}' % result
python
def from_Track(track): if not hasattr(track, 'bars'): return False lastkey = Key('C') lasttime = (4, 4) result = '' for bar in track.bars: if lastkey != bar.key: showkey = True else: showkey = False if lasttime != bar.meter: showtime = True else: showtime = False result += from_Bar(bar, showkey, showtime) + ' ' lastkey = bar.key lasttime = bar.meter return '{ %s}' % result
[ "def", "from_Track", "(", "track", ")", ":", "# Throw exception", "if", "not", "hasattr", "(", "track", ",", "'bars'", ")", ":", "return", "False", "lastkey", "=", "Key", "(", "'C'", ")", "lasttime", "=", "(", "4", ",", "4", ")", "# Handle the Bars:", "result", "=", "''", "for", "bar", "in", "track", ".", "bars", ":", "if", "lastkey", "!=", "bar", ".", "key", ":", "showkey", "=", "True", "else", ":", "showkey", "=", "False", "if", "lasttime", "!=", "bar", ".", "meter", ":", "showtime", "=", "True", "else", ":", "showtime", "=", "False", "result", "+=", "from_Bar", "(", "bar", ",", "showkey", ",", "showtime", ")", "+", "' '", "lastkey", "=", "bar", ".", "key", "lasttime", "=", "bar", ".", "meter", "return", "'{ %s}'", "%", "result" ]
Process a Track object and return the LilyPond equivalent in a string.
[ "Process", "a", "Track", "object", "and", "return", "the", "LilyPond", "equivalent", "in", "a", "string", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/lilypond.py#L162-L184
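A hedged sketch for the from_Track record above; Track.add_bar and Bar.place_rest are assumed from the containers package and are not shown in this dump.

from mingus.containers import Bar, Track
from mingus.extra import lilypond

track = Track()
bar = Bar()
bar.place_rest(1)   # a whole rest keeps the bar simple (assumed API)
track.add_bar(bar)
# Key and time signatures are only emitted when they change between bars.
print(lilypond.from_Track(track))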
bspaans/python-mingus
mingus/extra/lilypond.py
from_Composition
def from_Composition(composition): """Return the LilyPond equivalent of a Composition in a string.""" # warning Throw exception if not hasattr(composition, 'tracks'): return False result = '\\header { title = "%s" composer = "%s" opus = "%s" } '\ % (composition.title, composition.author, composition.subtitle) for track in composition.tracks: result += from_Track(track) + ' ' return result[:-1]
python
def from_Composition(composition): if not hasattr(composition, 'tracks'): return False result = '\\header { title = "%s" composer = "%s" opus = "%s" } '\ % (composition.title, composition.author, composition.subtitle) for track in composition.tracks: result += from_Track(track) + ' ' return result[:-1]
[ "def", "from_Composition", "(", "composition", ")", ":", "# warning Throw exception", "if", "not", "hasattr", "(", "composition", ",", "'tracks'", ")", ":", "return", "False", "result", "=", "'\\\\header { title = \"%s\" composer = \"%s\" opus = \"%s\" } '", "%", "(", "composition", ".", "title", ",", "composition", ".", "author", ",", "composition", ".", "subtitle", ")", "for", "track", "in", "composition", ".", "tracks", ":", "result", "+=", "from_Track", "(", "track", ")", "+", "' '", "return", "result", "[", ":", "-", "1", "]" ]
Return the LilyPond equivalent of a Composition in a string.
[ "Return", "the", "LilyPond", "equivalent", "of", "a", "Composition", "in", "a", "string", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/lilypond.py#L186-L195
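A hedged sketch for the from_Composition record above. The title, author and subtitle attributes are read directly by the code; Composition.add_track is assumed from the containers package.

from mingus.containers import Composition, Track
from mingus.extra import lilypond

comp = Composition()
comp.title = 'Study'
comp.author = 'Anon'
comp.subtitle = 'Op. 1'   # rendered as the LilyPond opus field
comp.add_track(Track())
print(lilypond.from_Composition(comp))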
bspaans/python-mingus
mingus/extra/lilypond.py
save_string_and_execute_LilyPond
def save_string_and_execute_LilyPond(ly_string, filename, command): """A helper function for to_png and to_pdf. Should not be used directly.""" ly_string = '\\version "2.10.33"\n' + ly_string if filename[-4:] in ['.pdf', '.png']: filename = filename[:-4] try: f = open(filename + '.ly', 'w') f.write(ly_string) f.close() except: return False command = 'lilypond %s -o "%s" "%s.ly"' % (command, filename, filename) print 'Executing: %s' % command p = subprocess.Popen(command, shell=True).wait() os.remove(filename + '.ly') return True
python
def save_string_and_execute_LilyPond(ly_string, filename, command): ly_string = '\\version "2.10.33"\n' + ly_string if filename[-4:] in ['.pdf', '.png']: filename = filename[:-4] try: f = open(filename + '.ly', 'w') f.write(ly_string) f.close() except: return False command = 'lilypond %s -o "%s" "%s.ly"' % (command, filename, filename) print 'Executing: %s' % command p = subprocess.Popen(command, shell=True).wait() os.remove(filename + '.ly') return True
[ "def", "save_string_and_execute_LilyPond", "(", "ly_string", ",", "filename", ",", "command", ")", ":", "ly_string", "=", "'\\\\version \"2.10.33\"\\n'", "+", "ly_string", "if", "filename", "[", "-", "4", ":", "]", "in", "[", "'.pdf'", ",", "'.png'", "]", ":", "filename", "=", "filename", "[", ":", "-", "4", "]", "try", ":", "f", "=", "open", "(", "filename", "+", "'.ly'", ",", "'w'", ")", "f", ".", "write", "(", "ly_string", ")", "f", ".", "close", "(", ")", "except", ":", "return", "False", "command", "=", "'lilypond %s -o \"%s\" \"%s.ly\"'", "%", "(", "command", ",", "filename", ",", "filename", ")", "print", "'Executing: %s'", "%", "command", "p", "=", "subprocess", ".", "Popen", "(", "command", ",", "shell", "=", "True", ")", ".", "wait", "(", ")", "os", ".", "remove", "(", "filename", "+", "'.ly'", ")", "return", "True" ]
A helper function for to_png and to_pdf. Should not be used directly.
[ "A", "helper", "function", "for", "to_png", "and", "to_pdf", ".", "Should", "not", "be", "used", "directly", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/extra/lilypond.py#L214-L229
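The three LilyPond helpers above (from_Track, from_Composition, save_string_and_execute_LilyPond) are the bridge from mingus containers to engraved sheet music. A minimal usage sketch, assuming the standard mingus container API (Bar.place_notes, Track.add_bar, Composition.add_track) and that the to_png/to_pdf helpers mentioned in the docstring take a LilyPond string plus an output filename; the lilypond binary must be on the PATH:

    # Sketch: engrave a one-bar composition to PDF with mingus.extra.lilypond.
    # to_pdf's signature (ly_string, filename) is assumed from the docstring above.
    from mingus.containers import Bar, Track, Composition
    from mingus.extra import lilypond

    bar = Bar()                        # defaults to key C, 4/4 meter
    for name in ('C-4', 'E-4', 'G-4', 'C-5'):
        bar.place_notes(name, 4)       # four quarter notes fill the bar

    track = Track()
    track.add_bar(bar)

    comp = Composition()
    comp.add_track(track)
    comp.title, comp.author, comp.subtitle = 'Example', 'mingus', ''

    ly_string = lilypond.from_Composition(comp)  # '\header { ... } { ... }'
    lilypond.to_pdf(ly_string, 'example')        # expected to write example.pdf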
bspaans/python-mingus
mingus/core/value.py
determine
def determine(value): """Analyse the value and return a tuple containing the parts it's made of. The tuple respectively consists of the base note value, the number of dots, and the ratio (see tuplet). Examples: >>> determine(8) (8, 0, 1, 1) >>> determine(12) (8, 0, 3, 2) >>> determine(14) (8, 0, 7, 4) This function recognizes all the base values, triplets, quintuplets, septuplets and up to four dots. The values are matched on range. """ i = -2 for v in base_values: if value == v: return (value, 0, 1, 1) if value < v: break i += 1 scaled = float(value) / 2 ** i if scaled >= 0.9375: # base value return (base_values[i], 0, 1, 1) elif scaled >= 0.8125: # septuplet: scaled = 0.875 return (base_values[i + 1], 0, 7, 4) elif scaled >= 17 / 24.0: # triplet: scaled = 0.75 return (base_values[i + 1], 0, 3, 2) elif scaled >= 31 / 48.0: # dotted note (one dot): scaled = 2/3.0 return (v, 1, 1, 1) elif scaled >= 67 / 112.0: # quintuplet: scaled = 0.625 return (base_values[i + 1], 0, 5, 4) d = 3 for x in range(2, 5): d += 2 ** x if scaled == 2.0 ** x / d: return (v, x, 1, 1) return (base_values[i + 1], 0, 1, 1)
python
def determine(value): i = -2 for v in base_values: if value == v: return (value, 0, 1, 1) if value < v: break i += 1 scaled = float(value) / 2 ** i if scaled >= 0.9375: return (base_values[i], 0, 1, 1) elif scaled >= 0.8125: return (base_values[i + 1], 0, 7, 4) elif scaled >= 17 / 24.0: return (base_values[i + 1], 0, 3, 2) elif scaled >= 31 / 48.0: return (v, 1, 1, 1) elif scaled >= 67 / 112.0: return (base_values[i + 1], 0, 5, 4) d = 3 for x in range(2, 5): d += 2 ** x if scaled == 2.0 ** x / d: return (v, x, 1, 1) return (base_values[i + 1], 0, 1, 1)
[ "def", "determine", "(", "value", ")", ":", "i", "=", "-", "2", "for", "v", "in", "base_values", ":", "if", "value", "==", "v", ":", "return", "(", "value", ",", "0", ",", "1", ",", "1", ")", "if", "value", "<", "v", ":", "break", "i", "+=", "1", "scaled", "=", "float", "(", "value", ")", "/", "2", "**", "i", "if", "scaled", ">=", "0.9375", ":", "# base value", "return", "(", "base_values", "[", "i", "]", ",", "0", ",", "1", ",", "1", ")", "elif", "scaled", ">=", "0.8125", ":", "# septuplet: scaled = 0.875", "return", "(", "base_values", "[", "i", "+", "1", "]", ",", "0", ",", "7", ",", "4", ")", "elif", "scaled", ">=", "17", "/", "24.0", ":", "# triplet: scaled = 0.75", "return", "(", "base_values", "[", "i", "+", "1", "]", ",", "0", ",", "3", ",", "2", ")", "elif", "scaled", ">=", "31", "/", "48.0", ":", "# dotted note (one dot): scaled = 2/3.0", "return", "(", "v", ",", "1", ",", "1", ",", "1", ")", "elif", "scaled", ">=", "67", "/", "112.0", ":", "# quintuplet: scaled = 0.625", "return", "(", "base_values", "[", "i", "+", "1", "]", ",", "0", ",", "5", ",", "4", ")", "d", "=", "3", "for", "x", "in", "range", "(", "2", ",", "5", ")", ":", "d", "+=", "2", "**", "x", "if", "scaled", "==", "2.0", "**", "x", "/", "d", ":", "return", "(", "v", ",", "x", ",", "1", ",", "1", ")", "return", "(", "base_values", "[", "i", "+", "1", "]", ",", "0", ",", "1", ",", "1", ")" ]
Analyse the value and return a tuple containing the parts it's made of. The tuple respectively consists of the base note value, the number of dots, and the ratio (see tuplet). Examples: >>> determine(8) (8, 0, 1, 1) >>> determine(12) (8, 0, 3, 2) >>> determine(14) (8, 0, 7, 4) This function recognizes all the base values, triplets, quintuplets, septuplets and up to four dots. The values are matched on range.
[ "Analyse", "the", "value", "and", "return", "a", "tuple", "containing", "the", "parts", "it", "s", "made", "of", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/core/value.py#L230-L274
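The tuple returned by determine is (base value, number of dots, tuplet numerator, tuplet denominator). A short check that only reuses the outputs already given in the docstring:

    # Reading determine()'s (base, dots, numerator, denominator) result,
    # using only the values documented in the docstring above.
    from mingus.core import value

    assert value.determine(8) == (8, 0, 1, 1)    # plain eighth note
    assert value.determine(12) == (8, 0, 3, 2)   # eighth-note triplet, ratio 3:2
    assert value.determine(14) == (8, 0, 7, 4)   # eighth-note septuplet, ratio 7:4

    base, dots, num, den = value.determine(12)
    print('base=%s dots=%d tuplet=%d:%d' % (base, dots, num, den))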
bspaans/python-mingus
mingus/midi/fluidsynth.py
init
def init(sf2, driver=None, file=None): """Initialize the audio. Return True on success, False on failure. This function needs to be called before you can have any audio. The sf2 argument should be the location of a valid soundfont file. The optional driver argument can be any of 'alsa', 'oss', 'jack', 'portaudio', 'sndmgr', 'coreaudio' or 'Direct Sound'. If the file argument is not None, then instead of loading the driver, a new wave file will be initialized to store the audio data. """ global midi, initialized if not initialized: if file is not None: midi.start_recording(file) else: midi.start_audio_output(driver) if not midi.load_sound_font(sf2): return False midi.fs.program_reset() initialized = True return True
python
def init(sf2, driver=None, file=None): global midi, initialized if not initialized: if file is not None: midi.start_recording(file) else: midi.start_audio_output(driver) if not midi.load_sound_font(sf2): return False midi.fs.program_reset() initialized = True return True
[ "def", "init", "(", "sf2", ",", "driver", "=", "None", ",", "file", "=", "None", ")", ":", "global", "midi", ",", "initialized", "if", "not", "initialized", ":", "if", "file", "is", "not", "None", ":", "midi", ".", "start_recording", "(", "file", ")", "else", ":", "midi", ".", "start_audio_output", "(", "driver", ")", "if", "not", "midi", ".", "load_sound_font", "(", "sf2", ")", ":", "return", "False", "midi", ".", "fs", ".", "program_reset", "(", ")", "initialized", "=", "True", "return", "True" ]
Initialize the audio. Return True on success, False on failure. This function needs to be called before you can have any audio. The sf2 argument should be the location of a valid soundfont file. The optional driver argument can be any of 'alsa', 'oss', 'jack', 'portaudio', 'sndmgr', 'coreaudio' or 'Direct Sound'. If the file argument is not None, then instead of loading the driver, a new wave file will be initialized to store the audio data.
[ "Initialize", "the", "audio", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/midi/fluidsynth.py#L105-L130
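A sketch of the call order implied by the docstring: init must succeed before any audio helpers are used. The soundfont path is a placeholder, and the play_Note call is an assumption about the rest of mingus.midi.fluidsynth rather than something shown in this record:

    # Sketch: start audio output from a soundfont, then sound a single note.
    # 'soundfont.sf2' is a placeholder path; driver names are the ones listed
    # in the docstring.  play_Note is assumed from the wider fluidsynth module.
    from mingus.containers import Note
    from mingus.midi import fluidsynth

    if not fluidsynth.init('soundfont.sf2', driver='portaudio'):
        raise SystemExit('could not start audio / load soundfont.sf2')

    fluidsynth.play_Note(Note('C-4'))

    # Alternatively, render to a WAV file instead of a live driver:
    # fluidsynth.init('soundfont.sf2', file='dump.wav')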
bspaans/python-mingus
mingus/midi/fluidsynth.py
FluidSynthSequencer.start_recording
def start_recording(self, file='mingus_dump.wav'): """Initialize a new wave file for recording.""" w = wave.open(file, 'wb') w.setnchannels(2) w.setsampwidth(2) w.setframerate(44100) self.wav = w
python
def start_recording(self, file='mingus_dump.wav'): w = wave.open(file, 'wb') w.setnchannels(2) w.setsampwidth(2) w.setframerate(44100) self.wav = w
[ "def", "start_recording", "(", "self", ",", "file", "=", "'mingus_dump.wav'", ")", ":", "w", "=", "wave", ".", "open", "(", "file", ",", "'wb'", ")", "w", ".", "setnchannels", "(", "2", ")", "w", ".", "setsampwidth", "(", "2", ")", "w", ".", "setframerate", "(", "44100", ")", "self", ".", "wav", "=", "w" ]
Initialize a new wave file for recording.
[ "Initialize", "a", "new", "wave", "file", "for", "recording", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/midi/fluidsynth.py#L61-L67
bspaans/python-mingus
mingus/midi/fluidsynth.py
FluidSynthSequencer.load_sound_font
def load_sound_font(self, sf2): """Load a sound font. Return True on success, False on failure. This function should be called before your audio can be played, since the instruments are kept in the sf2 file. """ self.sfid = self.fs.sfload(sf2) return not self.sfid == -1
python
def load_sound_font(self, sf2): self.sfid = self.fs.sfload(sf2) return not self.sfid == -1
[ "def", "load_sound_font", "(", "self", ",", "sf2", ")", ":", "self", ".", "sfid", "=", "self", ".", "fs", ".", "sfload", "(", "sf2", ")", "return", "not", "self", ".", "sfid", "==", "-", "1" ]
Load a sound font. Return True on success, False on failure. This function should be called before your audio can be played, since the instruments are kept in the sf2 file.
[ "Load", "a", "sound", "font", "." ]
train
https://github.com/bspaans/python-mingus/blob/aa5a5d992d45ada61be0f9f86261380731bd7749/mingus/midi/fluidsynth.py#L69-L78
awslabs/aws-greengrass-group-setup
gg_group_setup/cmd.py
GroupCommands.create
def create(self, group_type, config_file, group_name=None, region=None, profile_name=None): """ Create a Greengrass group in the given region. :param group_type: the type of group to create. Must match a `key` in the `group_types` dict :param config_file: config file of the group to create :param group_name: the name of the group. If no name is given, then group_type will be used. :param region: the region in which to create the new group. [default: us-west-2] :param profile_name: the name of the `awscli` profile to use. [default: None] """ logging.info("[begin] create command using group_types:{0}".format( self.group_types)) config = GroupConfigFile(config_file=config_file) if config.is_fresh() is False: raise ValueError( "Config file already tracking previously created group" ) if group_type not in self.group_types.keys(): raise ValueError("Can only create {0} groups.".format( self.group_types) ) if region is None: region = self._region # create an instance of the requested group type that uses the given # config file and region gt = self.group_types[group_type](config=config, region=region) # get and store the account's IoT endpoint for future use ep = _get_iot_session(region=region).describe_endpoint() misc = config['misc'] misc['iot_endpoint'] = ep['endpointAddress'] config['misc'] = misc # Create a Group logging.info("[begin] Creating a Greengrass Group") if group_name is None: group_name = group_type gg_client = _get_gg_session(region=region, profile_name=profile_name) group_info = gg_client.create_group(Name="{0}".format(group_name)) config['group'] = {"id": group_info['Id']} # setup the policies and roles gt.create_and_attach_thing_policy() gt.create_and_attach_iam_role() cl_arn = self._create_core_definition( gg_client=gg_client, group_type=gt, config=config, group_name=group_name ) dl_arn = self._create_device_definition( gg_client=gg_client, group_type=gt, config=config, group_name=group_name ) lv_arn = self._create_function_definition( gg_client=gg_client, group_type=gt, config=config ) log_arn = self._create_logger_definition( gg_client=gg_client, group_type=gt, config=config ) sub_arn = self._create_subscription_definition( gg_client=gg_client, group_type=gt, config=config ) logging.info( 'Group details, core_def:{0} device_def:{1} func_def:{2} ' 'logger_def:{3} subs_def:{4}'.format( cl_arn, dl_arn, lv_arn, log_arn, sub_arn) ) # Add all the constituent parts to the Greengrass Group group_args = {'GroupId': group_info['Id']} if cl_arn: group_args['CoreDefinitionVersionArn'] = cl_arn if dl_arn: group_args['DeviceDefinitionVersionArn'] = dl_arn if lv_arn: group_args['FunctionDefinitionVersionArn'] = lv_arn if log_arn: group_args['LoggerDefinitionVersionArn'] = log_arn if sub_arn: group_args['SubscriptionDefinitionVersionArn'] = sub_arn grp = gg_client.create_group_version( **group_args ) # store info about the provisioned artifacts into the local config file config['group'] = { "id": group_info['Id'], "version_arn": grp['Arn'], "version": grp['Version'], "name": group_name } logging.info( "[end] Created Greengrass Group {0}".format(group_info['Id']))
python
def create(self, group_type, config_file, group_name=None, region=None, profile_name=None): logging.info("[begin] create command using group_types:{0}".format( self.group_types)) config = GroupConfigFile(config_file=config_file) if config.is_fresh() is False: raise ValueError( "Config file already tracking previously created group" ) if group_type not in self.group_types.keys(): raise ValueError("Can only create {0} groups.".format( self.group_types) ) if region is None: region = self._region gt = self.group_types[group_type](config=config, region=region) ep = _get_iot_session(region=region).describe_endpoint() misc = config['misc'] misc['iot_endpoint'] = ep['endpointAddress'] config['misc'] = misc logging.info("[begin] Creating a Greengrass Group") if group_name is None: group_name = group_type gg_client = _get_gg_session(region=region, profile_name=profile_name) group_info = gg_client.create_group(Name="{0}".format(group_name)) config['group'] = {"id": group_info['Id']} gt.create_and_attach_thing_policy() gt.create_and_attach_iam_role() cl_arn = self._create_core_definition( gg_client=gg_client, group_type=gt, config=config, group_name=group_name ) dl_arn = self._create_device_definition( gg_client=gg_client, group_type=gt, config=config, group_name=group_name ) lv_arn = self._create_function_definition( gg_client=gg_client, group_type=gt, config=config ) log_arn = self._create_logger_definition( gg_client=gg_client, group_type=gt, config=config ) sub_arn = self._create_subscription_definition( gg_client=gg_client, group_type=gt, config=config ) logging.info( 'Group details, core_def:{0} device_def:{1} func_def:{2} ' 'logger_def:{3} subs_def:{4}'.format( cl_arn, dl_arn, lv_arn, log_arn, sub_arn) ) group_args = {'GroupId': group_info['Id']} if cl_arn: group_args['CoreDefinitionVersionArn'] = cl_arn if dl_arn: group_args['DeviceDefinitionVersionArn'] = dl_arn if lv_arn: group_args['FunctionDefinitionVersionArn'] = lv_arn if log_arn: group_args['LoggerDefinitionVersionArn'] = log_arn if sub_arn: group_args['SubscriptionDefinitionVersionArn'] = sub_arn grp = gg_client.create_group_version( **group_args ) config['group'] = { "id": group_info['Id'], "version_arn": grp['Arn'], "version": grp['Version'], "name": group_name } logging.info( "[end] Created Greengrass Group {0}".format(group_info['Id']))
[ "def", "create", "(", "self", ",", "group_type", ",", "config_file", ",", "group_name", "=", "None", ",", "region", "=", "None", ",", "profile_name", "=", "None", ")", ":", "logging", ".", "info", "(", "\"[begin] create command using group_types:{0}\"", ".", "format", "(", "self", ".", "group_types", ")", ")", "config", "=", "GroupConfigFile", "(", "config_file", "=", "config_file", ")", "if", "config", ".", "is_fresh", "(", ")", "is", "False", ":", "raise", "ValueError", "(", "\"Config file already tracking previously created group\"", ")", "if", "group_type", "not", "in", "self", ".", "group_types", ".", "keys", "(", ")", ":", "raise", "ValueError", "(", "\"Can only create {0} groups.\"", ".", "format", "(", "self", ".", "group_types", ")", ")", "if", "region", "is", "None", ":", "region", "=", "self", ".", "_region", "# create an instance of the requested group type that uses the given", "# config file and region", "gt", "=", "self", ".", "group_types", "[", "group_type", "]", "(", "config", "=", "config", ",", "region", "=", "region", ")", "# get and store the account's IoT endpoint for future use", "ep", "=", "_get_iot_session", "(", "region", "=", "region", ")", ".", "describe_endpoint", "(", ")", "misc", "=", "config", "[", "'misc'", "]", "misc", "[", "'iot_endpoint'", "]", "=", "ep", "[", "'endpointAddress'", "]", "config", "[", "'misc'", "]", "=", "misc", "# Create a Group", "logging", ".", "info", "(", "\"[begin] Creating a Greengrass Group\"", ")", "if", "group_name", "is", "None", ":", "group_name", "=", "group_type", "gg_client", "=", "_get_gg_session", "(", "region", "=", "region", ",", "profile_name", "=", "profile_name", ")", "group_info", "=", "gg_client", ".", "create_group", "(", "Name", "=", "\"{0}\"", ".", "format", "(", "group_name", ")", ")", "config", "[", "'group'", "]", "=", "{", "\"id\"", ":", "group_info", "[", "'Id'", "]", "}", "# setup the policies and roles", "gt", ".", "create_and_attach_thing_policy", "(", ")", "gt", ".", "create_and_attach_iam_role", "(", ")", "cl_arn", "=", "self", ".", "_create_core_definition", "(", "gg_client", "=", "gg_client", ",", "group_type", "=", "gt", ",", "config", "=", "config", ",", "group_name", "=", "group_name", ")", "dl_arn", "=", "self", ".", "_create_device_definition", "(", "gg_client", "=", "gg_client", ",", "group_type", "=", "gt", ",", "config", "=", "config", ",", "group_name", "=", "group_name", ")", "lv_arn", "=", "self", ".", "_create_function_definition", "(", "gg_client", "=", "gg_client", ",", "group_type", "=", "gt", ",", "config", "=", "config", ")", "log_arn", "=", "self", ".", "_create_logger_definition", "(", "gg_client", "=", "gg_client", ",", "group_type", "=", "gt", ",", "config", "=", "config", ")", "sub_arn", "=", "self", ".", "_create_subscription_definition", "(", "gg_client", "=", "gg_client", ",", "group_type", "=", "gt", ",", "config", "=", "config", ")", "logging", ".", "info", "(", "'Group details, core_def:{0} device_def:{1} func_def:{2} '", "'logger_def:{3} subs_def:{4}'", ".", "format", "(", "cl_arn", ",", "dl_arn", ",", "lv_arn", ",", "log_arn", ",", "sub_arn", ")", ")", "# Add all the constituent parts to the Greengrass Group", "group_args", "=", "{", "'GroupId'", ":", "group_info", "[", "'Id'", "]", "}", "if", "cl_arn", ":", "group_args", "[", "'CoreDefinitionVersionArn'", "]", "=", "cl_arn", "if", "dl_arn", ":", "group_args", "[", "'DeviceDefinitionVersionArn'", "]", "=", "dl_arn", "if", "lv_arn", ":", "group_args", "[", "'FunctionDefinitionVersionArn'", "]", "=", "lv_arn", "if", 
"log_arn", ":", "group_args", "[", "'LoggerDefinitionVersionArn'", "]", "=", "log_arn", "if", "sub_arn", ":", "group_args", "[", "'SubscriptionDefinitionVersionArn'", "]", "=", "sub_arn", "grp", "=", "gg_client", ".", "create_group_version", "(", "*", "*", "group_args", ")", "# store info about the provisioned artifacts into the local config file", "config", "[", "'group'", "]", "=", "{", "\"id\"", ":", "group_info", "[", "'Id'", "]", ",", "\"version_arn\"", ":", "grp", "[", "'Arn'", "]", ",", "\"version\"", ":", "grp", "[", "'Version'", "]", ",", "\"name\"", ":", "group_name", "}", "logging", ".", "info", "(", "\"[end] Created Greengrass Group {0}\"", ".", "format", "(", "group_info", "[", "'Id'", "]", ")", ")" ]
Create a Greengrass group in the given region. :param group_type: the type of group to create. Must match a `key` in the `group_types` dict :param config_file: config file of the group to create :param group_name: the name of the group. If no name is given, then group_type will be used. :param region: the region in which to create the new group. [default: us-west-2] :param profile_name: the name of the `awscli` profile to use. [default: None]
[ "Create", "a", "Greengrass", "group", "in", "the", "given", "region", "." ]
train
https://github.com/awslabs/aws-greengrass-group-setup/blob/06189ceccb794fedf80e0e7649938c18792e16c9/gg_group_setup/cmd.py#L73-L180
awslabs/aws-greengrass-group-setup
gg_group_setup/cmd.py
GroupCommands._create_subscription_definition
def _create_subscription_definition(gg_client, group_type, config): """ Configure routing subscriptions for a Greengrass group. group_type: either default or an overridden group type config: GroupConfigFile object used for routing subscriptions """ logging.info('[begin] Configuring routing subscriptions') sub_info = gg_client.create_subscription_definition( Name="{0}_routing".format(group_type.type_name) ) logging.info('Created subscription definition: {0}'.format(sub_info)) subs = group_type.get_subscription_definition(config=config) if subs is None: logging.warning( "[end] No SubscriptionDefinition exists in GroupType:{0}".format( group_type.type_name) ) return subv = gg_client.create_subscription_definition_version( SubscriptionDefinitionId=sub_info['Id'], Subscriptions=subs ) sub_arn = subv['Arn'] config['subscription_def'] = { "id": sub_info['Id'], "version_arn": sub_arn } logging.info('[end] Configured routing subscriptions') return sub_arn
python
def _create_subscription_definition(gg_client, group_type, config): logging.info('[begin] Configuring routing subscriptions') sub_info = gg_client.create_subscription_definition( Name="{0}_routing".format(group_type.type_name) ) logging.info('Created subscription definition: {0}'.format(sub_info)) subs = group_type.get_subscription_definition(config=config) if subs is None: logging.warning( "[end] No SubscriptionDefinition exists in GroupType:{0}".format( group_type.type_name) ) return subv = gg_client.create_subscription_definition_version( SubscriptionDefinitionId=sub_info['Id'], Subscriptions=subs ) sub_arn = subv['Arn'] config['subscription_def'] = { "id": sub_info['Id'], "version_arn": sub_arn } logging.info('[end] Configured routing subscriptions') return sub_arn
[ "def", "_create_subscription_definition", "(", "gg_client", ",", "group_type", ",", "config", ")", ":", "logging", ".", "info", "(", "'[begin] Configuring routing subscriptions'", ")", "sub_info", "=", "gg_client", ".", "create_subscription_definition", "(", "Name", "=", "\"{0}_routing\"", ".", "format", "(", "group_type", ".", "type_name", ")", ")", "logging", ".", "info", "(", "'Created subscription definition: {0}'", ".", "format", "(", "sub_info", ")", ")", "subs", "=", "group_type", ".", "get_subscription_definition", "(", "config", "=", "config", ")", "if", "subs", "is", "None", ":", "logging", ".", "warning", "(", "\"[end] No SubscriptionDefinition exists in GroupType:{0}\"", ".", "format", "(", "group_type", ".", "type_name", ")", ")", "return", "subv", "=", "gg_client", ".", "create_subscription_definition_version", "(", "SubscriptionDefinitionId", "=", "sub_info", "[", "'Id'", "]", ",", "Subscriptions", "=", "subs", ")", "sub_arn", "=", "subv", "[", "'Arn'", "]", "config", "[", "'subscription_def'", "]", "=", "{", "\"id\"", ":", "sub_info", "[", "'Id'", "]", ",", "\"version_arn\"", ":", "sub_arn", "}", "logging", ".", "info", "(", "'[end] Configured routing subscriptions'", ")", "return", "sub_arn" ]
Configure routing subscriptions for a Greengrass group. group_type: either default or an overridden group type config: GroupConfigFile object used for routing subscriptions
[ "Configure", "routing", "subscriptions", "for", "a", "Greengrass", "group", "." ]
train
https://github.com/awslabs/aws-greengrass-group-setup/blob/06189ceccb794fedf80e0e7649938c18792e16c9/gg_group_setup/cmd.py#L321-L352
awslabs/aws-greengrass-group-setup
gg_group_setup/cmd.py
GroupCommands.clean_core
def clean_core(self, config_file, region=None, profile_name=None): """ Clean all Core related provisioned artifacts from both the local file and the AWS Greengrass service. :param config_file: config file containing the core to clean :param region: the region in which the core should be cleaned. [default: us-west-2] :param profile_name: the name of the `awscli` profile to use. [default: None] :return: """ config = GroupConfigFile(config_file=config_file) if region is None: region = self._region # delete the Core's Certificate core_cert_id = config['core']['cert_id'] core_cert_arn = config['core']['cert_arn'] core_thing_name = config['core']['thing_name'] policy_name = config['misc']['policy_name'] logging.info('Deleting core_thing_name:{0}'.format(core_thing_name)) GroupCommands._delete_thing( cert_arn=core_cert_arn, cert_id=core_cert_id, thing_name=core_thing_name, region=region, policy_name=policy_name, profile_name=profile_name ) config.make_core_fresh()
python
def clean_core(self, config_file, region=None, profile_name=None): config = GroupConfigFile(config_file=config_file) if region is None: region = self._region core_cert_id = config['core']['cert_id'] core_cert_arn = config['core']['cert_arn'] core_thing_name = config['core']['thing_name'] policy_name = config['misc']['policy_name'] logging.info('Deleting core_thing_name:{0}'.format(core_thing_name)) GroupCommands._delete_thing( cert_arn=core_cert_arn, cert_id=core_cert_id, thing_name=core_thing_name, region=region, policy_name=policy_name, profile_name=profile_name ) config.make_core_fresh()
[ "def", "clean_core", "(", "self", ",", "config_file", ",", "region", "=", "None", ",", "profile_name", "=", "None", ")", ":", "config", "=", "GroupConfigFile", "(", "config_file", "=", "config_file", ")", "if", "region", "is", "None", ":", "region", "=", "self", ".", "_region", "# delete the Core's Certificate", "core_cert_id", "=", "config", "[", "'core'", "]", "[", "'cert_id'", "]", "core_cert_arn", "=", "config", "[", "'core'", "]", "[", "'cert_arn'", "]", "core_thing_name", "=", "config", "[", "'core'", "]", "[", "'thing_name'", "]", "policy_name", "=", "config", "[", "'misc'", "]", "[", "'policy_name'", "]", "logging", ".", "info", "(", "'Deleting core_thing_name:{0}'", ".", "format", "(", "core_thing_name", ")", ")", "GroupCommands", ".", "_delete_thing", "(", "cert_arn", "=", "core_cert_arn", ",", "cert_id", "=", "core_cert_id", ",", "thing_name", "=", "core_thing_name", ",", "region", "=", "region", ",", "policy_name", "=", "policy_name", ",", "profile_name", "=", "profile_name", ")", "config", ".", "make_core_fresh", "(", ")" ]
Clean all Core related provisioned artifacts from both the local file and the AWS Greengrass service. :param config_file: config file containing the core to clean :param region: the region in which the core should be cleaned. [default: us-west-2] :param profile_name: the name of the `awscli` profile to use. [default: None] :return:
[ "Clean", "all", "Core", "related", "provisioned", "artifacts", "from", "both", "the", "local", "file", "and", "the", "AWS", "Greengrass", "service", "." ]
train
https://github.com/awslabs/aws-greengrass-group-setup/blob/06189ceccb794fedf80e0e7649938c18792e16c9/gg_group_setup/cmd.py#L490-L518
awslabs/aws-greengrass-group-setup
gg_group_setup/cmd.py
GroupCommands.clean_devices
def clean_devices(self, config_file, region=None, profile_name=None): """ Clean all device related provisioned artifacts from both the local file and the AWS Greengrass service. :param config_file: config file containing the devices to clean :param region: the region in which the devices should be cleaned. [default: us-west-2] :param profile_name: the name of the `awscli` profile to use. [default: None] """ config = GroupConfigFile(config_file=config_file) if region is None: region = self._region devices = config['devices'] if 'device_thing_name' in devices: logging.info('Configured devices already clean') return policy_name = config['misc']['policy_name'] for device in devices: cert_arn = devices[device]['cert_arn'] cert_id = devices[device]['cert_id'] thing_name = device logging.info('Deleting device_thing_name:{0}'.format(thing_name)) GroupCommands._delete_thing( cert_arn, cert_id, thing_name, region, policy_name, profile_name ) config.make_devices_fresh()
python
def clean_devices(self, config_file, region=None, profile_name=None): config = GroupConfigFile(config_file=config_file) if region is None: region = self._region devices = config['devices'] if 'device_thing_name' in devices: logging.info('Configured devices already clean') return policy_name = config['misc']['policy_name'] for device in devices: cert_arn = devices[device]['cert_arn'] cert_id = devices[device]['cert_id'] thing_name = device logging.info('Deleting device_thing_name:{0}'.format(thing_name)) GroupCommands._delete_thing( cert_arn, cert_id, thing_name, region, policy_name, profile_name ) config.make_devices_fresh()
[ "def", "clean_devices", "(", "self", ",", "config_file", ",", "region", "=", "None", ",", "profile_name", "=", "None", ")", ":", "config", "=", "GroupConfigFile", "(", "config_file", "=", "config_file", ")", "if", "region", "is", "None", ":", "region", "=", "self", ".", "_region", "devices", "=", "config", "[", "'devices'", "]", "if", "'device_thing_name'", "in", "devices", ":", "logging", ".", "info", "(", "'Configured devices already clean'", ")", "return", "policy_name", "=", "config", "[", "'misc'", "]", "[", "'policy_name'", "]", "for", "device", "in", "devices", ":", "cert_arn", "=", "devices", "[", "device", "]", "[", "'cert_arn'", "]", "cert_id", "=", "devices", "[", "device", "]", "[", "'cert_id'", "]", "thing_name", "=", "device", "logging", ".", "info", "(", "'Deleting device_thing_name:{0}'", ".", "format", "(", "thing_name", ")", ")", "GroupCommands", ".", "_delete_thing", "(", "cert_arn", ",", "cert_id", ",", "thing_name", ",", "region", ",", "policy_name", ",", "profile_name", ")", "config", ".", "make_devices_fresh", "(", ")" ]
Clean all device related provisioned artifacts from both the local file and the AWS Greengrass service. :param config_file: config file containing the devices to clean :param region: the region in which the devices should be cleaned. [default: us-west-2] :param profile_name: the name of the `awscli` profile to use. [default: None]
[ "Clean", "all", "device", "related", "provisioned", "artifacts", "from", "both", "the", "local", "file", "and", "the", "AWS", "Greengrass", "service", "." ]
train
https://github.com/awslabs/aws-greengrass-group-setup/blob/06189ceccb794fedf80e0e7649938c18792e16c9/gg_group_setup/cmd.py#L520-L549
awslabs/aws-greengrass-group-setup
gg_group_setup/cmd.py
GroupCommands.clean_file
def clean_file(config_file): """ Clean all provisioned artifacts from the local config file. :param config_file: config file of the group to clean """ logging.info('[begin] Cleaning config file') config = GroupConfigFile(config_file=config_file) if config.is_fresh() is True: raise ValueError("Config is already clean.") config.make_fresh() logging.info('[end] Cleaned config file:{0}'.format(config_file))
python
def clean_file(config_file): logging.info('[begin] Cleaning config file') config = GroupConfigFile(config_file=config_file) if config.is_fresh() is True: raise ValueError("Config is already clean.") config.make_fresh() logging.info('[end] Cleaned config file:{0}'.format(config_file))
[ "def", "clean_file", "(", "config_file", ")", ":", "logging", ".", "info", "(", "'[begin] Cleaning config file'", ")", "config", "=", "GroupConfigFile", "(", "config_file", "=", "config_file", ")", "if", "config", ".", "is_fresh", "(", ")", "is", "True", ":", "raise", "ValueError", "(", "\"Config is already clean.\"", ")", "config", ".", "make_fresh", "(", ")", "logging", ".", "info", "(", "'[end] Cleaned config file:{0}'", ".", "format", "(", "config_file", ")", ")" ]
Clean all provisioned artifacts from the local config file. :param config_file: config file of the group to clean
[ "Clean", "all", "provisioned", "artifacts", "from", "the", "local", "config", "file", "." ]
train
https://github.com/awslabs/aws-greengrass-group-setup/blob/06189ceccb794fedf80e0e7649938c18792e16c9/gg_group_setup/cmd.py#L552-L564
awslabs/aws-greengrass-group-setup
gg_group_setup/cmd.py
GroupCommands.clean_all
def clean_all(self, config_file, region=None, profile_name=None): """ Clean all provisioned artifacts from both the local file and the AWS Greengrass service. :param config_file: config file containing the group to clean :param region: the region in which the group should be cleaned. [default: us-west-2] :param profile_name: the name of the `awscli` profile to use. [default: None] """ logging.info('[begin] Cleaning all provisioned artifacts') config = GroupConfigFile(config_file=config_file) if config.is_fresh() is True: raise ValueError("Config is already clean.") if region is None: region = self._region self._delete_group( config_file, region=region, profile_name=profile_name) self.clean_core(config_file, region=region) self.clean_devices(config_file, region=region) self.clean_file(config_file) logging.info('[end] Cleaned all provisioned artifacts')
python
def clean_all(self, config_file, region=None, profile_name=None): logging.info('[begin] Cleaning all provisioned artifacts') config = GroupConfigFile(config_file=config_file) if config.is_fresh() is True: raise ValueError("Config is already clean.") if region is None: region = self._region self._delete_group( config_file, region=region, profile_name=profile_name) self.clean_core(config_file, region=region) self.clean_devices(config_file, region=region) self.clean_file(config_file) logging.info('[end] Cleaned all provisioned artifacts')
[ "def", "clean_all", "(", "self", ",", "config_file", ",", "region", "=", "None", ",", "profile_name", "=", "None", ")", ":", "logging", ".", "info", "(", "'[begin] Cleaning all provisioned artifacts'", ")", "config", "=", "GroupConfigFile", "(", "config_file", "=", "config_file", ")", "if", "config", ".", "is_fresh", "(", ")", "is", "True", ":", "raise", "ValueError", "(", "\"Config is already clean.\"", ")", "if", "region", "is", "None", ":", "region", "=", "self", ".", "_region", "self", ".", "_delete_group", "(", "config_file", ",", "region", "=", "region", ",", "profile_name", "=", "profile_name", ")", "self", ".", "clean_core", "(", "config_file", ",", "region", "=", "region", ")", "self", ".", "clean_devices", "(", "config_file", ",", "region", "=", "region", ")", "self", ".", "clean_file", "(", "config_file", ")", "logging", ".", "info", "(", "'[end] Cleaned all provisioned artifacts'", ")" ]
Clean all provisioned artifacts from both the local file and the AWS Greengrass service. :param config_file: config file containing the group to clean :param region: the region in which the group should be cleaned. [default: us-west-2] :param profile_name: the name of the `awscli` profile to use. [default: None]
[ "Clean", "all", "provisioned", "artifacts", "from", "both", "the", "local", "file", "and", "the", "AWS", "Greengrass", "service", "." ]
train
https://github.com/awslabs/aws-greengrass-group-setup/blob/06189ceccb794fedf80e0e7649938c18792e16c9/gg_group_setup/cmd.py#L566-L591
awslabs/aws-greengrass-group-setup
gg_group_setup/cmd.py
GroupCommands.deploy
def deploy(self, config_file, region=None, profile_name=None): """ Deploy the configuration and Lambda functions of a Greengrass group to the Greengrass core contained in the group. :param config_file: config file of the group to deploy :param region: the region from which to deploy the group. :param profile_name: the name of the `awscli` profile to use. [default: None] """ config = GroupConfigFile(config_file=config_file) if config.is_fresh(): raise ValueError("Config not yet tracking a group. Cannot deploy.") if region is None: region = self._region gg_client = _get_gg_session(region=region, profile_name=profile_name) dep_req = gg_client.create_deployment( GroupId=config['group']['id'], GroupVersionId=config['group']['version'], DeploymentType="NewDeployment" ) print("Group deploy requested for deployment_id:{0}".format( dep_req['DeploymentId'], ))
python
def deploy(self, config_file, region=None, profile_name=None): config = GroupConfigFile(config_file=config_file) if config.is_fresh(): raise ValueError("Config not yet tracking a group. Cannot deploy.") if region is None: region = self._region gg_client = _get_gg_session(region=region, profile_name=profile_name) dep_req = gg_client.create_deployment( GroupId=config['group']['id'], GroupVersionId=config['group']['version'], DeploymentType="NewDeployment" ) print("Group deploy requested for deployment_id:{0}".format( dep_req['DeploymentId'], ))
[ "def", "deploy", "(", "self", ",", "config_file", ",", "region", "=", "None", ",", "profile_name", "=", "None", ")", ":", "config", "=", "GroupConfigFile", "(", "config_file", "=", "config_file", ")", "if", "config", ".", "is_fresh", "(", ")", ":", "raise", "ValueError", "(", "\"Config not yet tracking a group. Cannot deploy.\"", ")", "if", "region", "is", "None", ":", "region", "=", "self", ".", "_region", "gg_client", "=", "_get_gg_session", "(", "region", "=", "region", ",", "profile_name", "=", "profile_name", ")", "dep_req", "=", "gg_client", ".", "create_deployment", "(", "GroupId", "=", "config", "[", "'group'", "]", "[", "'id'", "]", ",", "GroupVersionId", "=", "config", "[", "'group'", "]", "[", "'version'", "]", ",", "DeploymentType", "=", "\"NewDeployment\"", ")", "print", "(", "\"Group deploy requested for deployment_id:{0}\"", ".", "format", "(", "dep_req", "[", "'DeploymentId'", "]", ",", ")", ")" ]
Deploy the configuration and Lambda functions of a Greengrass group to the Greengrass core contained in the group. :param config_file: config file of the group to deploy :param region: the region from which to deploy the group. :param profile_name: the name of the `awscli` profile to use. [default: None]
[ "Deploy", "the", "configuration", "and", "Lambda", "functions", "of", "a", "Greengrass", "group", "to", "the", "Greengrass", "core", "contained", "in", "the", "group", "." ]
train
https://github.com/awslabs/aws-greengrass-group-setup/blob/06189ceccb794fedf80e0e7649938c18792e16c9/gg_group_setup/cmd.py#L593-L619
awslabs/aws-greengrass-group-setup
gg_group_setup/cmd.py
GroupCommands.create_core
def create_core(self, thing_name, config_file, region=None, cert_dir=None, account_id=None, policy_name='ggc-default-policy', profile_name=None): """ Using the `thing_name` value, creates a Thing in AWS IoT, attaches and downloads new keys & certs to the certificate directory, then records the created information in the local config file for inclusion in the Greengrass Group as a Greengrass Core. :param thing_name: the name of the thing to create and use as a Greengrass Core :param config_file: config file used to track the Greengrass Core in the group :param region: the region in which to create the new core. [default: us-west-2] :param cert_dir: the directory in which to store the thing's keys and certs. If `None` then use the current directory. :param account_id: the account_id in which to create the new core. [default: None] :param policy_name: the name of the policy to associate with the device. [default: 'ggc-default-policy'] :param profile_name: the name of the `awscli` profile to use. [default: None] """ config = GroupConfigFile(config_file=config_file) if config.is_fresh() is False: raise ValueError( "Config file already tracking previously created core or group" ) if region is None: region = self._region if account_id is None: account_id = self._account_id keys_cert, thing = self.create_thing(thing_name, region, cert_dir) cert_arn = keys_cert['certificateArn'] config['core'] = { 'thing_arn': thing['thingArn'], 'cert_arn': cert_arn, 'cert_id': keys_cert['certificateId'], 'thing_name': thing_name } logging.debug("create_core cfg:{0}".format(config)) logging.info("Thing:'{0}' associated with cert:'{1}'".format( thing_name, cert_arn)) core_policy = self.get_core_policy( core_name=thing_name, account_id=account_id, region=region) iot_client = _get_iot_session(region=region, profile_name=profile_name) self._create_attach_thing_policy( cert_arn, core_policy, iot_client=iot_client, policy_name=policy_name ) misc = config['misc'] misc['policy_name'] = policy_name config['misc'] = misc
python
def create_core(self, thing_name, config_file, region=None, cert_dir=None, account_id=None, policy_name='ggc-default-policy', profile_name=None): config = GroupConfigFile(config_file=config_file) if config.is_fresh() is False: raise ValueError( "Config file already tracking previously created core or group" ) if region is None: region = self._region if account_id is None: account_id = self._account_id keys_cert, thing = self.create_thing(thing_name, region, cert_dir) cert_arn = keys_cert['certificateArn'] config['core'] = { 'thing_arn': thing['thingArn'], 'cert_arn': cert_arn, 'cert_id': keys_cert['certificateId'], 'thing_name': thing_name } logging.debug("create_core cfg:{0}".format(config)) logging.info("Thing:'{0}' associated with cert:'{1}'".format( thing_name, cert_arn)) core_policy = self.get_core_policy( core_name=thing_name, account_id=account_id, region=region) iot_client = _get_iot_session(region=region, profile_name=profile_name) self._create_attach_thing_policy( cert_arn, core_policy, iot_client=iot_client, policy_name=policy_name ) misc = config['misc'] misc['policy_name'] = policy_name config['misc'] = misc
[ "def", "create_core", "(", "self", ",", "thing_name", ",", "config_file", ",", "region", "=", "None", ",", "cert_dir", "=", "None", ",", "account_id", "=", "None", ",", "policy_name", "=", "'ggc-default-policy'", ",", "profile_name", "=", "None", ")", ":", "config", "=", "GroupConfigFile", "(", "config_file", "=", "config_file", ")", "if", "config", ".", "is_fresh", "(", ")", "is", "False", ":", "raise", "ValueError", "(", "\"Config file already tracking previously created core or group\"", ")", "if", "region", "is", "None", ":", "region", "=", "self", ".", "_region", "if", "account_id", "is", "None", ":", "account_id", "=", "self", ".", "_account_id", "keys_cert", ",", "thing", "=", "self", ".", "create_thing", "(", "thing_name", ",", "region", ",", "cert_dir", ")", "cert_arn", "=", "keys_cert", "[", "'certificateArn'", "]", "config", "[", "'core'", "]", "=", "{", "'thing_arn'", ":", "thing", "[", "'thingArn'", "]", ",", "'cert_arn'", ":", "cert_arn", ",", "'cert_id'", ":", "keys_cert", "[", "'certificateId'", "]", ",", "'thing_name'", ":", "thing_name", "}", "logging", ".", "debug", "(", "\"create_core cfg:{0}\"", ".", "format", "(", "config", ")", ")", "logging", ".", "info", "(", "\"Thing:'{0}' associated with cert:'{1}'\"", ".", "format", "(", "thing_name", ",", "cert_arn", ")", ")", "core_policy", "=", "self", ".", "get_core_policy", "(", "core_name", "=", "thing_name", ",", "account_id", "=", "account_id", ",", "region", "=", "region", ")", "iot_client", "=", "_get_iot_session", "(", "region", "=", "region", ",", "profile_name", "=", "profile_name", ")", "self", ".", "_create_attach_thing_policy", "(", "cert_arn", ",", "core_policy", ",", "iot_client", "=", "iot_client", ",", "policy_name", "=", "policy_name", ")", "misc", "=", "config", "[", "'misc'", "]", "misc", "[", "'policy_name'", "]", "=", "policy_name", "config", "[", "'misc'", "]", "=", "misc" ]
Using the `thing_name` value, creates a Thing in AWS IoT, attaches and downloads new keys & certs to the certificate directory, then records the created information in the local config file for inclusion in the Greengrass Group as a Greengrass Core. :param thing_name: the name of the thing to create and use as a Greengrass Core :param config_file: config file used to track the Greengrass Core in the group :param region: the region in which to create the new core. [default: us-west-2] :param cert_dir: the directory in which to store the thing's keys and certs. If `None` then use the current directory. :param account_id: the account_id in which to create the new core. [default: None] :param policy_name: the name of the policy to associate with the device. [default: 'ggc-default-policy'] :param profile_name: the name of the `awscli` profile to use. [default: None]
[ "Using", "the", "thing_name", "value", "creates", "a", "Thing", "in", "AWS", "IoT", "attaches", "and", "downloads", "new", "keys", "&", "certs", "to", "the", "certificate", "directory", "then", "records", "the", "created", "information", "in", "the", "local", "config", "file", "for", "inclusion", "in", "the", "Greengrass", "Group", "as", "a", "Greengrass", "Core", "." ]
train
https://github.com/awslabs/aws-greengrass-group-setup/blob/06189ceccb794fedf80e0e7649938c18792e16c9/gg_group_setup/cmd.py#L679-L733
awslabs/aws-greengrass-group-setup
gg_group_setup/cmd.py
GroupCommands.create_devices
def create_devices(self, thing_names, config_file, region=None, cert_dir=None, append=False, account_id=None, policy_name='ggd-discovery-policy', profile_name=None): """ Using the `thing_names` values, creates Things in AWS IoT, attaches and downloads new keys & certs to the certificate directory, then records the created information in the local config file for inclusion in the Greengrass Group as Greengrass Devices. :param thing_names: the thing name or list of thing names to create and use as Greengrass Devices :param config_file: config file used to track the Greengrass Devices in the group :param region: the region in which to create the new devices. [default: us-west-2] :param cert_dir: the directory in which to store the thing's keys and certs. If `None` then use the current directory. :param append: append the created devices to the list of devices in the config file. [default: False] :param account_id: the account ID in which to create devices. If 'None' the config_file will be checked for an `account_id` value in the `misc` section. :param policy_name: the name of the policy to associate with the device. [default: 'ggd-discovery-policy'] :param profile_name: the name of the `awscli` profile to use. [default: None] """ logging.info("create_devices thing_names:{0}".format(thing_names)) config = GroupConfigFile(config_file=config_file) if append is False and config.is_device_fresh() is False: raise ValueError( "Config file tracking previously created devices. Append " "devices instead" ) if region is None: region = self._region if account_id is None: account_id = self._account_id devices = dict() if append: devices = config['devices'] if type(thing_names) is str: thing_names = [thing_names] iot_client = _get_iot_session(region=region, profile_name=profile_name) for thing_name in thing_names: keys_cert, thing = self.create_thing(thing_name, region, cert_dir) cert_arn = keys_cert['certificateArn'] devices[thing_name] = { 'thing_arn': thing['thingArn'], 'cert_arn': cert_arn, 'cert_id': keys_cert['certificateId'], 'thing_name': thing_name } logging.info("Thing:'{0}' associated with cert:'{1}'".format( thing_name, cert_arn)) device_policy = self.get_device_policy( device_name=thing_name, account_id=account_id, region=region ) self._create_attach_thing_policy(cert_arn, device_policy, iot_client, policy_name) config['devices'] = devices logging.info("create_devices cfg:{0}".format(config))
python
def create_devices(self, thing_names, config_file, region=None, cert_dir=None, append=False, account_id=None, policy_name='ggd-discovery-policy', profile_name=None): logging.info("create_devices thing_names:{0}".format(thing_names)) config = GroupConfigFile(config_file=config_file) if append is False and config.is_device_fresh() is False: raise ValueError( "Config file tracking previously created devices. Append " "devices instead" ) if region is None: region = self._region if account_id is None: account_id = self._account_id devices = dict() if append: devices = config['devices'] if type(thing_names) is str: thing_names = [thing_names] iot_client = _get_iot_session(region=region, profile_name=profile_name) for thing_name in thing_names: keys_cert, thing = self.create_thing(thing_name, region, cert_dir) cert_arn = keys_cert['certificateArn'] devices[thing_name] = { 'thing_arn': thing['thingArn'], 'cert_arn': cert_arn, 'cert_id': keys_cert['certificateId'], 'thing_name': thing_name } logging.info("Thing:'{0}' associated with cert:'{1}'".format( thing_name, cert_arn)) device_policy = self.get_device_policy( device_name=thing_name, account_id=account_id, region=region ) self._create_attach_thing_policy(cert_arn, device_policy, iot_client, policy_name) config['devices'] = devices logging.info("create_devices cfg:{0}".format(config))
[ "def", "create_devices", "(", "self", ",", "thing_names", ",", "config_file", ",", "region", "=", "None", ",", "cert_dir", "=", "None", ",", "append", "=", "False", ",", "account_id", "=", "None", ",", "policy_name", "=", "'ggd-discovery-policy'", ",", "profile_name", "=", "None", ")", ":", "logging", ".", "info", "(", "\"create_devices thing_names:{0}\"", ".", "format", "(", "thing_names", ")", ")", "config", "=", "GroupConfigFile", "(", "config_file", "=", "config_file", ")", "if", "append", "is", "False", "and", "config", ".", "is_device_fresh", "(", ")", "is", "False", ":", "raise", "ValueError", "(", "\"Config file tracking previously created devices. Append \"", "\"devices instead\"", ")", "if", "region", "is", "None", ":", "region", "=", "self", ".", "_region", "if", "account_id", "is", "None", ":", "account_id", "=", "self", ".", "_account_id", "devices", "=", "dict", "(", ")", "if", "append", ":", "devices", "=", "config", "[", "'devices'", "]", "if", "type", "(", "thing_names", ")", "is", "str", ":", "thing_names", "=", "[", "thing_names", "]", "iot_client", "=", "_get_iot_session", "(", "region", "=", "region", ",", "profile_name", "=", "profile_name", ")", "for", "thing_name", "in", "thing_names", ":", "keys_cert", ",", "thing", "=", "self", ".", "create_thing", "(", "thing_name", ",", "region", ",", "cert_dir", ")", "cert_arn", "=", "keys_cert", "[", "'certificateArn'", "]", "devices", "[", "thing_name", "]", "=", "{", "'thing_arn'", ":", "thing", "[", "'thingArn'", "]", ",", "'cert_arn'", ":", "cert_arn", ",", "'cert_id'", ":", "keys_cert", "[", "'certificateId'", "]", ",", "'thing_name'", ":", "thing_name", "}", "logging", ".", "info", "(", "\"Thing:'{0}' associated with cert:'{1}'\"", ".", "format", "(", "thing_name", ",", "cert_arn", ")", ")", "device_policy", "=", "self", ".", "get_device_policy", "(", "device_name", "=", "thing_name", ",", "account_id", "=", "account_id", ",", "region", "=", "region", ")", "self", ".", "_create_attach_thing_policy", "(", "cert_arn", ",", "device_policy", ",", "iot_client", ",", "policy_name", ")", "config", "[", "'devices'", "]", "=", "devices", "logging", ".", "info", "(", "\"create_devices cfg:{0}\"", ".", "format", "(", "config", ")", ")" ]
Using the `thing_names` values, creates Things in AWS IoT, attaches and downloads new keys & certs to the certificate directory, then records the created information in the local config file for inclusion in the Greengrass Group as Greengrass Devices. :param thing_names: the thing name or list of thing names to create and use as Greengrass Devices :param config_file: config file used to track the Greengrass Devices in the group :param region: the region in which to create the new devices. [default: us-west-2] :param cert_dir: the directory in which to store the thing's keys and certs. If `None` then use the current directory. :param append: append the created devices to the list of devices in the config file. [default: False] :param account_id: the account ID in which to create devices. If 'None' the config_file will be checked for an `account_id` value in the `misc` section. :param policy_name: the name of the policy to associate with the device. [default: 'ggd-discovery-policy'] :param profile_name: the name of the `awscli` profile to use. [default: None]
[ "Using", "the", "thing_names", "values", "creates", "Things", "in", "AWS", "IoT", "attaches", "and", "downloads", "new", "keys", "&", "certs", "to", "the", "certificate", "directory", "then", "records", "the", "created", "information", "in", "the", "local", "config", "file", "for", "inclusion", "in", "the", "Greengrass", "Group", "as", "Greengrass", "Devices", "." ]
train
https://github.com/awslabs/aws-greengrass-group-setup/blob/06189ceccb794fedf80e0e7649938c18792e16c9/gg_group_setup/cmd.py#L773-L837
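Taken together, the GroupCommands methods in these records describe a provisioning order: create the core thing, create any device things, create the group itself, then deploy it (with clean_all available to tear everything down). A sketch of that order; the GroupCommands constructor arguments and the MyGroupType import are assumptions, and only the method names and signatures come from the records above:

    # Sketch of the provisioning order documented above.  MyGroupType stands in
    # for a project-specific group type class, and the GroupCommands constructor
    # arguments are inferred from the attributes used in `create`; both are
    # assumptions, not part of these records.
    from gg_group_setup.cmd import GroupCommands
    from my_project.group_types import MyGroupType   # hypothetical GroupType subclass

    cmds = GroupCommands(group_types={'demo': MyGroupType})

    cfg = 'group-config.json'   # fresh group config file tracked by GroupConfigFile
    cmds.create_core('demo-core', config_file=cfg, region='us-west-2')
    cmds.create_devices(['demo-device-01'], config_file=cfg, region='us-west-2')
    cmds.create('demo', config_file=cfg, group_name='demo-group',
                region='us-west-2')
    cmds.deploy(config_file=cfg, region='us-west-2')

    # When finished, remove everything that was provisioned:
    # cmds.clean_all(config_file=cfg, region='us-west-2')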
awslabs/aws-greengrass-group-setup
gg_group_setup/cmd.py
GroupCommands.associate_devices
def associate_devices(self, thing_names, config_file, region=None, profile_name=None): # TODO remove this function when Group discovery is enriched """ Using the `thing_names` values, associate existing Things in AWS IoT with the config of another Greengrass Group for use as Greengrass Devices. :param thing_names: the thing name or list of thing names to associate as Greengrass Devices :param config_file: config file used to track the Greengrass Devices in the group :param region: the region in which to associate devices. [default: us-west-2] :param profile_name: the name of the `awscli` profile to use. [default: None] """ logging.info("associate_devices thing_names:{0}".format(thing_names)) config = GroupConfigFile(config_file=config_file) if region is None: region = self._region devices = config['devices'] if type(thing_names) is str: thing_names = [thing_names] iot_client = _get_iot_session(region=region, profile_name=profile_name) for thing_name in thing_names: thing = iot_client.describe_thing(thingName=thing_name) logging.info("Found existing Thing:{0}".format(thing)) p = iot_client.list_thing_principals(thingName=thing_name) logging.info("Existing Thing has principals:{0}".format(p)) devices[thing_name] = { 'thing_arn': thing['attributes']['thingArn'], 'cert_arn': p['principals'][0], 'cert_id': thing['attributes']['certificateId'], 'thing_name': thing_name } logging.info("Thing:'{0}' associated with config:'{1}'".format( thing_name, config_file)) config['devices'] = devices
python
def associate_devices(self, thing_names, config_file, region=None, profile_name=None): logging.info("associate_devices thing_names:{0}".format(thing_names)) config = GroupConfigFile(config_file=config_file) if region is None: region = self._region devices = config['devices'] if type(thing_names) is str: thing_names = [thing_names] iot_client = _get_iot_session(region=region, profile_name=profile_name) for thing_name in thing_names: thing = iot_client.describe_thing(thingName=thing_name) logging.info("Found existing Thing:{0}".format(thing)) p = iot_client.list_thing_principals(thingName=thing_name) logging.info("Existing Thing has principals:{0}".format(p)) devices[thing_name] = { 'thing_arn': thing['attributes']['thingArn'], 'cert_arn': p['principals'][0], 'cert_id': thing['attributes']['certificateId'], 'thing_name': thing_name } logging.info("Thing:'{0}' associated with config:'{1}'".format( thing_name, config_file)) config['devices'] = devices
[ "def", "associate_devices", "(", "self", ",", "thing_names", ",", "config_file", ",", "region", "=", "None", ",", "profile_name", "=", "None", ")", ":", "# TODO remove this function when Group discovery is enriched", "logging", ".", "info", "(", "\"associate_devices thing_names:{0}\"", ".", "format", "(", "thing_names", ")", ")", "config", "=", "GroupConfigFile", "(", "config_file", "=", "config_file", ")", "if", "region", "is", "None", ":", "region", "=", "self", ".", "_region", "devices", "=", "config", "[", "'devices'", "]", "if", "type", "(", "thing_names", ")", "is", "str", ":", "thing_names", "=", "[", "thing_names", "]", "iot_client", "=", "_get_iot_session", "(", "region", "=", "region", ",", "profile_name", "=", "profile_name", ")", "for", "thing_name", "in", "thing_names", ":", "thing", "=", "iot_client", ".", "describe_thing", "(", "thingName", "=", "thing_name", ")", "logging", ".", "info", "(", "\"Found existing Thing:{0}\"", ".", "format", "(", "thing", ")", ")", "p", "=", "iot_client", ".", "list_thing_principals", "(", "thingName", "=", "thing_name", ")", "logging", ".", "info", "(", "\"Existing Thing has principals:{0}\"", ".", "format", "(", "p", ")", ")", "devices", "[", "thing_name", "]", "=", "{", "'thing_arn'", ":", "thing", "[", "'attributes'", "]", "[", "'thingArn'", "]", ",", "'cert_arn'", ":", "p", "[", "'principals'", "]", "[", "0", "]", ",", "'cert_id'", ":", "thing", "[", "'attributes'", "]", "[", "'certificateId'", "]", ",", "'thing_name'", ":", "thing_name", "}", "logging", ".", "info", "(", "\"Thing:'{0}' associated with config:'{1}'\"", ".", "format", "(", "thing_name", ",", "config_file", ")", ")", "config", "[", "'devices'", "]", "=", "devices" ]
Using the `thing_names` values, associate existing Things in AWS IoT with the config of another Greengrass Group for use as Greengrass Devices. :param thing_names: the thing name or list of thing names to associate as Greengrass Devices :param config_file: config file used to track the Greengrass Devices in the group :param region: the region in which to associate devices. [default: us-west-2] :param profile_name: the name of the `awscli` profile to use. [default: None]
[ "Using", "the", "thing_names", "values", "associate", "existing", "Things", "in", "AWS", "IoT", "with", "the", "config", "of", "another", "Greengrass", "Group", "for", "use", "as", "Greengrass", "Devices", "." ]
train
https://github.com/awslabs/aws-greengrass-group-setup/blob/06189ceccb794fedf80e0e7649938c18792e16c9/gg_group_setup/cmd.py#L839-L878
yuanxu-li/html-table-extractor
html_table_extractor/extractor.py
Extractor._check_validity
def _check_validity(self, i, j, height, width): """ check if a rectangle (i, j, height, width) can be put into self.output """ return all(self._check_cell_validity(ii, jj) for ii in range(i, i+height) for jj in range(j, j+width))
python
def _check_validity(self, i, j, height, width): return all(self._check_cell_validity(ii, jj) for ii in range(i, i+height) for jj in range(j, j+width))
[ "def", "_check_validity", "(", "self", ",", "i", ",", "j", ",", "height", ",", "width", ")", ":", "return", "all", "(", "self", ".", "_check_cell_validity", "(", "ii", ",", "jj", ")", "for", "ii", "in", "range", "(", "i", ",", "i", "+", "height", ")", "for", "jj", "in", "range", "(", "j", ",", "j", "+", "width", ")", ")" ]
check if a rectangle (i, j, height, width) can be put into self.output
[ "check", "if", "a", "rectangle", "(", "i", "j", "height", "width", ")", "can", "be", "put", "into", "self", ".", "output" ]
train
https://github.com/yuanxu-li/html-table-extractor/blob/e3e0f51121e112c304a5c251180d81906131b276/html_table_extractor/extractor.py#L84-L88
yuanxu-li/html-table-extractor
html_table_extractor/extractor.py
Extractor._check_cell_validity
def _check_cell_validity(self, i, j): """ check if a cell (i, j) can be put into self._output """ if i >= len(self._output): return True if j >= len(self._output[i]): return True if self._output[i][j] is None: return True return False
python
def _check_cell_validity(self, i, j): if i >= len(self._output): return True if j >= len(self._output[i]): return True if self._output[i][j] is None: return True return False
[ "def", "_check_cell_validity", "(", "self", ",", "i", ",", "j", ")", ":", "if", "i", ">=", "len", "(", "self", ".", "_output", ")", ":", "return", "True", "if", "j", ">=", "len", "(", "self", ".", "_output", "[", "i", "]", ")", ":", "return", "True", "if", "self", ".", "_output", "[", "i", "]", "[", "j", "]", "is", "None", ":", "return", "True", "return", "False" ]
check if a cell (i, j) can be put into self._output
[ "check", "if", "a", "cell", "(", "i", "j", ")", "can", "be", "put", "into", "self", ".", "_output" ]
train
https://github.com/yuanxu-li/html-table-extractor/blob/e3e0f51121e112c304a5c251180d81906131b276/html_table_extractor/extractor.py#L90-L100
coinkite/connectrum
connectrum/protocol.py
StratumProtocol.send_data
def send_data(self, message): ''' Given an object, encode as JSON and transmit to the server. ''' #logger.debug("TX:\n%s", json.dumps(message, indent=2)) data = json.dumps(message).encode('utf-8') + b'\n' self.transport.write(data)
python
def send_data(self, message): data = json.dumps(message).encode('utf-8') + b'\n' self.transport.write(data)
[ "def", "send_data", "(", "self", ",", "message", ")", ":", "#logger.debug(\"TX:\\n%s\", json.dumps(message, indent=2))", "data", "=", "json", ".", "dumps", "(", "message", ")", ".", "encode", "(", "'utf-8'", ")", "+", "b'\\n'", "self", ".", "transport", ".", "write", "(", "data", ")" ]
Given an object, encode as JSON and transmit to the server.
[ "Given", "an", "object", "encode", "as", "JSON", "and", "transmit", "to", "the", "server", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/protocol.py#L57-L63
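send_data() above frames every message as newline-delimited JSON. A minimal standalone illustration of what ends up on the wire (no server involved; the message shape matches what _send_request builds later in this file):

import json

# A request the way the client builds it: an id, a method name, and params.
msg = {'id': 1, 'method': 'server.version', 'params': []}

# send_data() serializes to UTF-8 JSON and appends a newline as the frame
# delimiter before handing the bytes to the transport.
frame = json.dumps(msg).encode('utf-8') + b'\n'

print(frame)  # b'{"id": 1, "method": "server.version", "params": []}\n'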
coinkite/connectrum
connectrum/findall.py
IrcListener.got_who_reply
async def got_who_reply(self, nick=None, real_name=None, **kws): ''' Server replied to one of our WHO requests, with details. ''' #logger.debug('who reply: %r' % kws) nick = nick[2:] if nick[0:2] == 'E_' else nick host, ports = real_name.split(' ', 1) self.servers.remove(nick) logger.debug("Found: '%s' at %s with port list: %s",nick, host, ports) self.results[host.lower()] = ServerInfo(nick, host, ports) if not self.servers: self.all_done.set()
python
async def got_who_reply(self, nick=None, real_name=None, **kws): nick = nick[2:] if nick[0:2] == 'E_' else nick host, ports = real_name.split(' ', 1) self.servers.remove(nick) logger.debug("Found: '%s' at %s with port list: %s",nick, host, ports) self.results[host.lower()] = ServerInfo(nick, host, ports) if not self.servers: self.all_done.set()
[ "async", "def", "got_who_reply", "(", "self", ",", "nick", "=", "None", ",", "real_name", "=", "None", ",", "*", "*", "kws", ")", ":", "#logger.debug('who reply: %r' % kws)", "nick", "=", "nick", "[", "2", ":", "]", "if", "nick", "[", "0", ":", "2", "]", "==", "'E_'", "else", "nick", "host", ",", "ports", "=", "real_name", ".", "split", "(", "' '", ",", "1", ")", "self", ".", "servers", ".", "remove", "(", "nick", ")", "logger", ".", "debug", "(", "\"Found: '%s' at %s with port list: %s\"", ",", "nick", ",", "host", ",", "ports", ")", "self", ".", "results", "[", "host", ".", "lower", "(", ")", "]", "=", "ServerInfo", "(", "nick", ",", "host", ",", "ports", ")", "if", "not", "self", ".", "servers", ":", "self", ".", "all_done", ".", "set", "(", ")" ]
Server replied to one of our WHO requests, with details.
[ "Server", "replied", "to", "one", "of", "our", "WHO", "requests", "with", "details", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/findall.py#L59-L74
coinkite/connectrum
connectrum/client.py
StratumClient.connect
async def connect(self, server_info, proto_code=None, *, use_tor=False, disable_cert_verify=False, proxy=None, short_term=False): ''' Start connection process. Destination must be specified in a ServerInfo() record (first arg). ''' self.server_info = server_info if not proto_code: proto_code,*_ = server_info.protocols self.proto_code = proto_code logger.debug("Connecting to: %r" % server_info) if proto_code == 'g': # websocket # to do this, we'll need a websockets implementation that # operates more like a asyncio.Transport # maybe: `asyncws` or `aiohttp` raise NotImplementedError('sorry no WebSocket transport yet') hostname, port, use_ssl = server_info.get_port(proto_code) if use_tor: if have_aiosocks: # Connect via Tor proxy proxy, assumed to be on localhost:9050 # unless a tuple is given with another host/port combo. try: socks_host, socks_port = use_tor except TypeError: socks_host, socks_port = 'localhost', 9050 # basically no-one has .onion SSL certificates, and # pointless anyway. disable_cert_verify = True assert not proxy, "Sorry not yet supporting proxy->tor->dest" logger.debug(" .. using TOR") proxy = aiosocks.Socks5Addr(socks_host, int(socks_port)) else: logger.debug("Error: want to use tor, but no aiosocks module.") if use_ssl == True and disable_cert_verify: # Create a more liberal SSL context that won't # object to self-signed certicates. This is # very bad on public Internet, but probably ok # over Tor use_ssl = ssl.create_default_context() use_ssl.check_hostname = False use_ssl.verify_mode = ssl.CERT_NONE logger.debug(" .. SSL cert check disabled") async def _reconnect(): if self.protocol: return # race/duplicate work if proxy: if have_aiosocks: transport, protocol = await aiosocks.create_connection( StratumProtocol, proxy=proxy, proxy_auth=None, remote_resolve=True, ssl=use_ssl, dst=(hostname, port)) else: logger.debug("Error: want to use proxy, but no aiosocks module.") else: transport, protocol = await self.loop.create_connection( StratumProtocol, host=hostname, port=port, ssl=use_ssl) self.protocol = protocol protocol.client = self # capture actual values used self.actual_connection = dict(hostname=hostname, port=int(port), ssl=bool(use_ssl), tor=bool(proxy)) self.actual_connection['ip_addr'] = transport.get_extra_info('peername', default=['unknown'])[0] if not short_term: self.ka_task = self.loop.create_task(self._keepalive()) logger.debug("Connected to: %r" % server_info) # close whatever we had if self.protocol: self.protocol.close() self.protocol = None self.reconnect = _reconnect await self.reconnect()
python
async def connect(self, server_info, proto_code=None, *, use_tor=False, disable_cert_verify=False, proxy=None, short_term=False): self.server_info = server_info if not proto_code: proto_code,*_ = server_info.protocols self.proto_code = proto_code logger.debug("Connecting to: %r" % server_info) if proto_code == 'g': raise NotImplementedError('sorry no WebSocket transport yet') hostname, port, use_ssl = server_info.get_port(proto_code) if use_tor: if have_aiosocks: try: socks_host, socks_port = use_tor except TypeError: socks_host, socks_port = 'localhost', 9050 disable_cert_verify = True assert not proxy, "Sorry not yet supporting proxy->tor->dest" logger.debug(" .. using TOR") proxy = aiosocks.Socks5Addr(socks_host, int(socks_port)) else: logger.debug("Error: want to use tor, but no aiosocks module.") if use_ssl == True and disable_cert_verify: use_ssl = ssl.create_default_context() use_ssl.check_hostname = False use_ssl.verify_mode = ssl.CERT_NONE logger.debug(" .. SSL cert check disabled") async def _reconnect(): if self.protocol: return if proxy: if have_aiosocks: transport, protocol = await aiosocks.create_connection( StratumProtocol, proxy=proxy, proxy_auth=None, remote_resolve=True, ssl=use_ssl, dst=(hostname, port)) else: logger.debug("Error: want to use proxy, but no aiosocks module.") else: transport, protocol = await self.loop.create_connection( StratumProtocol, host=hostname, port=port, ssl=use_ssl) self.protocol = protocol protocol.client = self self.actual_connection = dict(hostname=hostname, port=int(port), ssl=bool(use_ssl), tor=bool(proxy)) self.actual_connection['ip_addr'] = transport.get_extra_info('peername', default=['unknown'])[0] if not short_term: self.ka_task = self.loop.create_task(self._keepalive()) logger.debug("Connected to: %r" % server_info) if self.protocol: self.protocol.close() self.protocol = None self.reconnect = _reconnect await self.reconnect()
[ "async", "def", "connect", "(", "self", ",", "server_info", ",", "proto_code", "=", "None", ",", "*", ",", "use_tor", "=", "False", ",", "disable_cert_verify", "=", "False", ",", "proxy", "=", "None", ",", "short_term", "=", "False", ")", ":", "self", ".", "server_info", "=", "server_info", "if", "not", "proto_code", ":", "proto_code", ",", "", "*", "_", "=", "server_info", ".", "protocols", "self", ".", "proto_code", "=", "proto_code", "logger", ".", "debug", "(", "\"Connecting to: %r\"", "%", "server_info", ")", "if", "proto_code", "==", "'g'", ":", "# websocket", "# to do this, we'll need a websockets implementation that", "# operates more like a asyncio.Transport", "# maybe: `asyncws` or `aiohttp` ", "raise", "NotImplementedError", "(", "'sorry no WebSocket transport yet'", ")", "hostname", ",", "port", ",", "use_ssl", "=", "server_info", ".", "get_port", "(", "proto_code", ")", "if", "use_tor", ":", "if", "have_aiosocks", ":", "# Connect via Tor proxy proxy, assumed to be on localhost:9050", "# unless a tuple is given with another host/port combo.", "try", ":", "socks_host", ",", "socks_port", "=", "use_tor", "except", "TypeError", ":", "socks_host", ",", "socks_port", "=", "'localhost'", ",", "9050", "# basically no-one has .onion SSL certificates, and", "# pointless anyway.", "disable_cert_verify", "=", "True", "assert", "not", "proxy", ",", "\"Sorry not yet supporting proxy->tor->dest\"", "logger", ".", "debug", "(", "\" .. using TOR\"", ")", "proxy", "=", "aiosocks", ".", "Socks5Addr", "(", "socks_host", ",", "int", "(", "socks_port", ")", ")", "else", ":", "logger", ".", "debug", "(", "\"Error: want to use tor, but no aiosocks module.\"", ")", "if", "use_ssl", "==", "True", "and", "disable_cert_verify", ":", "# Create a more liberal SSL context that won't", "# object to self-signed certicates. This is ", "# very bad on public Internet, but probably ok", "# over Tor", "use_ssl", "=", "ssl", ".", "create_default_context", "(", ")", "use_ssl", ".", "check_hostname", "=", "False", "use_ssl", ".", "verify_mode", "=", "ssl", ".", "CERT_NONE", "logger", ".", "debug", "(", "\" .. 
SSL cert check disabled\"", ")", "async", "def", "_reconnect", "(", ")", ":", "if", "self", ".", "protocol", ":", "return", "# race/duplicate work", "if", "proxy", ":", "if", "have_aiosocks", ":", "transport", ",", "protocol", "=", "await", "aiosocks", ".", "create_connection", "(", "StratumProtocol", ",", "proxy", "=", "proxy", ",", "proxy_auth", "=", "None", ",", "remote_resolve", "=", "True", ",", "ssl", "=", "use_ssl", ",", "dst", "=", "(", "hostname", ",", "port", ")", ")", "else", ":", "logger", ".", "debug", "(", "\"Error: want to use proxy, but no aiosocks module.\"", ")", "else", ":", "transport", ",", "protocol", "=", "await", "self", ".", "loop", ".", "create_connection", "(", "StratumProtocol", ",", "host", "=", "hostname", ",", "port", "=", "port", ",", "ssl", "=", "use_ssl", ")", "self", ".", "protocol", "=", "protocol", "protocol", ".", "client", "=", "self", "# capture actual values used", "self", ".", "actual_connection", "=", "dict", "(", "hostname", "=", "hostname", ",", "port", "=", "int", "(", "port", ")", ",", "ssl", "=", "bool", "(", "use_ssl", ")", ",", "tor", "=", "bool", "(", "proxy", ")", ")", "self", ".", "actual_connection", "[", "'ip_addr'", "]", "=", "transport", ".", "get_extra_info", "(", "'peername'", ",", "default", "=", "[", "'unknown'", "]", ")", "[", "0", "]", "if", "not", "short_term", ":", "self", ".", "ka_task", "=", "self", ".", "loop", ".", "create_task", "(", "self", ".", "_keepalive", "(", ")", ")", "logger", ".", "debug", "(", "\"Connected to: %r\"", "%", "server_info", ")", "# close whatever we had", "if", "self", ".", "protocol", ":", "self", ".", "protocol", ".", "close", "(", ")", "self", ".", "protocol", "=", "None", "self", ".", "reconnect", "=", "_reconnect", "await", "self", ".", "reconnect", "(", ")" ]
Start connection process. Destination must be specified in a ServerInfo() record (first arg).
[ "Start", "connection", "process", ".", "Destination", "must", "be", "specified", "in", "a", "ServerInfo", "()", "record", "(", "first", "arg", ")", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/client.py#L68-L159
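A hedged end-to-end sketch of connect() followed by an RPC call (RPC() is used the same way in _keepalive below). The hostname and ports are placeholders; the no-argument StratumClient constructor and the close() teardown are assumptions, while connect()'s arguments and ServerInfo's (nickname, hostname, ports) signature come from this file.

import asyncio

from connectrum.client import StratumClient
from connectrum.svr_info import ServerInfo

async def main():
    # Placeholder Electrum server; 's' entries are SSL, 't' plain TCP.
    svr = ServerInfo('example', 'electrum.example.com', 's50002 t50001')

    client = StratumClient()                 # assumed no-arg constructor
    await client.connect(svr, 's', disable_cert_verify=True)

    # RPC() resolves the future returned by _send_request(), exactly as
    # _keepalive() does with 'server.version'.
    banner = await client.RPC('server.banner')
    print(banner)

    client.close()                           # assumed teardown helper

asyncio.run(main())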
coinkite/connectrum
connectrum/client.py
StratumClient._keepalive
async def _keepalive(self): ''' Keep our connect to server alive forever, with some pointless traffic. ''' while self.protocol: vers = await self.RPC('server.version') logger.debug("Server version: " + repr(vers)) # Five minutes isn't really enough anymore; looks like # servers are killing 2-minute old idle connections now. # But decreasing interval this seems rude. await asyncio.sleep(600)
python
async def _keepalive(self): while self.protocol: vers = await self.RPC('server.version') logger.debug("Server version: " + repr(vers)) await asyncio.sleep(600)
[ "async", "def", "_keepalive", "(", "self", ")", ":", "while", "self", ".", "protocol", ":", "vers", "=", "await", "self", ".", "RPC", "(", "'server.version'", ")", "logger", ".", "debug", "(", "\"Server version: \"", "+", "repr", "(", "vers", ")", ")", "# Five minutes isn't really enough anymore; looks like", "# servers are killing 2-minute old idle connections now.", "# But decreasing interval this seems rude.", "await", "asyncio", ".", "sleep", "(", "600", ")" ]
Keep our connect to server alive forever, with some pointless traffic.
[ "Keep", "our", "connect", "to", "server", "alive", "forever", "with", "some", "pointless", "traffic", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/client.py#L161-L173
coinkite/connectrum
connectrum/client.py
StratumClient._send_request
def _send_request(self, method, params=[], is_subscribe = False): ''' Send a new request to the server. Serialized the JSON and tracks id numbers and optional callbacks. ''' # pick a new ID self.next_id += 1 req_id = self.next_id # serialize as JSON msg = {'id': req_id, 'method': method, 'params': params} # subscriptions are a Q, normal requests are a future if is_subscribe: waitQ = asyncio.Queue() self.subscriptions[method].append(waitQ) fut = asyncio.Future(loop=self.loop) self.inflight[req_id] = (msg, fut) # send it via the transport, which serializes it if not self.protocol: logger.debug("Need to reconnect to server") async def connect_first(): await self.reconnect() self.protocol.send_data(msg) self.loop.create_task(connect_first()) else: # typical case, send request immediatedly, response is a future self.protocol.send_data(msg) return fut if not is_subscribe else (fut, waitQ)
python
def _send_request(self, method, params=[], is_subscribe = False): self.next_id += 1 req_id = self.next_id msg = {'id': req_id, 'method': method, 'params': params} if is_subscribe: waitQ = asyncio.Queue() self.subscriptions[method].append(waitQ) fut = asyncio.Future(loop=self.loop) self.inflight[req_id] = (msg, fut) if not self.protocol: logger.debug("Need to reconnect to server") async def connect_first(): await self.reconnect() self.protocol.send_data(msg) self.loop.create_task(connect_first()) else: self.protocol.send_data(msg) return fut if not is_subscribe else (fut, waitQ)
[ "def", "_send_request", "(", "self", ",", "method", ",", "params", "=", "[", "]", ",", "is_subscribe", "=", "False", ")", ":", "# pick a new ID", "self", ".", "next_id", "+=", "1", "req_id", "=", "self", ".", "next_id", "# serialize as JSON", "msg", "=", "{", "'id'", ":", "req_id", ",", "'method'", ":", "method", ",", "'params'", ":", "params", "}", "# subscriptions are a Q, normal requests are a future", "if", "is_subscribe", ":", "waitQ", "=", "asyncio", ".", "Queue", "(", ")", "self", ".", "subscriptions", "[", "method", "]", ".", "append", "(", "waitQ", ")", "fut", "=", "asyncio", ".", "Future", "(", "loop", "=", "self", ".", "loop", ")", "self", ".", "inflight", "[", "req_id", "]", "=", "(", "msg", ",", "fut", ")", "# send it via the transport, which serializes it", "if", "not", "self", ".", "protocol", ":", "logger", ".", "debug", "(", "\"Need to reconnect to server\"", ")", "async", "def", "connect_first", "(", ")", ":", "await", "self", ".", "reconnect", "(", ")", "self", ".", "protocol", ".", "send_data", "(", "msg", ")", "self", ".", "loop", ".", "create_task", "(", "connect_first", "(", ")", ")", "else", ":", "# typical case, send request immediatedly, response is a future", "self", ".", "protocol", ".", "send_data", "(", "msg", ")", "return", "fut", "if", "not", "is_subscribe", "else", "(", "fut", ",", "waitQ", ")" ]
Send a new request to the server. Serializes the JSON and tracks id numbers and optional callbacks.
[ "Send", "a", "new", "request", "to", "the", "server", ".", "Serialized", "the", "JSON", "and", "tracks", "id", "numbers", "and", "optional", "callbacks", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/client.py#L176-L210
coinkite/connectrum
connectrum/client.py
StratumClient._got_response
def _got_response(self, msg): ''' Decode and dispatch responses from the server. Has already been unframed and deserialized into an object. ''' #logger.debug("MSG: %r" % msg) resp_id = msg.get('id', None) if resp_id is None: # subscription traffic comes with method set, but no req id. method = msg.get('method', None) if not method: logger.error("Incoming server message had no ID nor method in it", msg) return # not obvious, but result is on params, not result, for subscriptions result = msg.get('params', None) logger.debug("Traffic on subscription: %s" % method) subs = self.subscriptions.get(method) for q in subs: self.loop.create_task(q.put(result)) return assert 'method' not in msg result = msg.get('result') # fetch and forget about the request inf = self.inflight.pop(resp_id) if not inf: logger.error("Incoming server message had unknown ID in it: %s" % resp_id) return # it's a future which is done now req, rv = inf if 'error' in msg: err = msg['error'] logger.info("Error response: '%s'" % err) rv.set_exception(ElectrumErrorResponse(err, req)) else: rv.set_result(result)
python
def _got_response(self, msg): resp_id = msg.get('id', None) if resp_id is None: method = msg.get('method', None) if not method: logger.error("Incoming server message had no ID nor method in it", msg) return result = msg.get('params', None) logger.debug("Traffic on subscription: %s" % method) subs = self.subscriptions.get(method) for q in subs: self.loop.create_task(q.put(result)) return assert 'method' not in msg result = msg.get('result') inf = self.inflight.pop(resp_id) if not inf: logger.error("Incoming server message had unknown ID in it: %s" % resp_id) return req, rv = inf if 'error' in msg: err = msg['error'] logger.info("Error response: '%s'" % err) rv.set_exception(ElectrumErrorResponse(err, req)) else: rv.set_result(result)
[ "def", "_got_response", "(", "self", ",", "msg", ")", ":", "#logger.debug(\"MSG: %r\" % msg)", "resp_id", "=", "msg", ".", "get", "(", "'id'", ",", "None", ")", "if", "resp_id", "is", "None", ":", "# subscription traffic comes with method set, but no req id.", "method", "=", "msg", ".", "get", "(", "'method'", ",", "None", ")", "if", "not", "method", ":", "logger", ".", "error", "(", "\"Incoming server message had no ID nor method in it\"", ",", "msg", ")", "return", "# not obvious, but result is on params, not result, for subscriptions", "result", "=", "msg", ".", "get", "(", "'params'", ",", "None", ")", "logger", ".", "debug", "(", "\"Traffic on subscription: %s\"", "%", "method", ")", "subs", "=", "self", ".", "subscriptions", ".", "get", "(", "method", ")", "for", "q", "in", "subs", ":", "self", ".", "loop", ".", "create_task", "(", "q", ".", "put", "(", "result", ")", ")", "return", "assert", "'method'", "not", "in", "msg", "result", "=", "msg", ".", "get", "(", "'result'", ")", "# fetch and forget about the request", "inf", "=", "self", ".", "inflight", ".", "pop", "(", "resp_id", ")", "if", "not", "inf", ":", "logger", ".", "error", "(", "\"Incoming server message had unknown ID in it: %s\"", "%", "resp_id", ")", "return", "# it's a future which is done now", "req", ",", "rv", "=", "inf", "if", "'error'", "in", "msg", ":", "err", "=", "msg", "[", "'error'", "]", "logger", ".", "info", "(", "\"Error response: '%s'\"", "%", "err", ")", "rv", ".", "set_exception", "(", "ElectrumErrorResponse", "(", "err", ",", "req", ")", ")", "else", ":", "rv", ".", "set_result", "(", "result", ")" ]
Decode and dispatch responses from the server. Has already been unframed and deserialized into an object.
[ "Decode", "and", "dispatch", "responses", "from", "the", "server", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/client.py#L212-L260
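For reference, the two wire shapes the dispatcher above distinguishes: a reply to an earlier request carries the request 'id' with its payload under 'result' (or 'error'), while subscription traffic has no 'id', names the subscribed method, and carries its payload under 'params'. A tiny standalone illustration (values are made up):

# Reply to a request: matched to the pending future via 'id'.
rpc_reply = {'id': 3, 'result': '1.4'}

# Subscription push: no 'id', routed by 'method' to the registered queues,
# payload read from 'params' rather than 'result'.
subscription_push = {
    'method': 'blockchain.headers.subscribe',
    'params': [{'height': 700000}],
}

for msg in (rpc_reply, subscription_push):
    kind = 'subscription' if msg.get('id') is None else 'response'
    print(kind, msg)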
coinkite/connectrum
connectrum/client.py
StratumClient.subscribe
def subscribe(self, method, *params): ''' Perform a remote command which will stream events/data to us. Expects a method name, which look like: server.peers.subscribe .. and sometimes take arguments, all of which are positional. Returns a tuple: (Future, asyncio.Queue). The future will have the result of the initial call, and the queue will receive additional responses as they happen. ''' assert '.' in method assert method.endswith('subscribe') return self._send_request(method, params, is_subscribe=True)
python
def subscribe(self, method, *params): assert '.' in method assert method.endswith('subscribe') return self._send_request(method, params, is_subscribe=True)
[ "def", "subscribe", "(", "self", ",", "method", ",", "*", "params", ")", ":", "assert", "'.'", "in", "method", "assert", "method", ".", "endswith", "(", "'subscribe'", ")", "return", "self", ".", "_send_request", "(", "method", ",", "params", ",", "is_subscribe", "=", "True", ")" ]
Perform a remote command which will stream events/data to us. Expects a method name, which looks like: server.peers.subscribe .. and sometimes takes arguments, all of which are positional. Returns a tuple: (Future, asyncio.Queue). The future will have the result of the initial call, and the queue will receive additional responses as they happen.
[ "Perform", "a", "remote", "command", "which", "will", "stream", "events", "/", "data", "to", "us", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/client.py#L279-L294
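A short usage sketch for subscribe(): the future resolves with the immediate reply and the queue keeps receiving later pushes. The method name is a standard Electrum subscription used only as an example; `client` is a connected StratumClient as in the connect() sketch above.

async def watch_headers(client):
    # subscribe() returns (asyncio.Future, asyncio.Queue).
    fut, queue = client.subscribe('blockchain.headers.subscribe')

    first = await fut                 # result of the initial call
    print('current tip:', first)

    while True:
        update = await queue.get()    # blocks until the server pushes again
        print('new header notification:', update)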
coinkite/connectrum
connectrum/svr_info.py
ServerInfo.get_port
def get_port(self, for_protocol): ''' Return (hostname, port number, ssl) pair for the protocol. Assuming only one port per host. ''' assert len(for_protocol) == 1, "expect single letter code" use_ssl = for_protocol in ('s', 'g') if 'port' in self: return self['hostname'], int(self['port']), use_ssl rv = next(i for i in self['ports'] if i[0] == for_protocol) port = None if len(rv) >= 2: try: port = int(rv[1:]) except: pass port = port or DEFAULT_PORTS[for_protocol] return self['hostname'], port, use_ssl
python
def get_port(self, for_protocol): assert len(for_protocol) == 1, "expect single letter code" use_ssl = for_protocol in ('s', 'g') if 'port' in self: return self['hostname'], int(self['port']), use_ssl rv = next(i for i in self['ports'] if i[0] == for_protocol) port = None if len(rv) >= 2: try: port = int(rv[1:]) except: pass port = port or DEFAULT_PORTS[for_protocol] return self['hostname'], port, use_ssl
[ "def", "get_port", "(", "self", ",", "for_protocol", ")", ":", "assert", "len", "(", "for_protocol", ")", "==", "1", ",", "\"expect single letter code\"", "use_ssl", "=", "for_protocol", "in", "(", "'s'", ",", "'g'", ")", "if", "'port'", "in", "self", ":", "return", "self", "[", "'hostname'", "]", ",", "int", "(", "self", "[", "'port'", "]", ")", ",", "use_ssl", "rv", "=", "next", "(", "i", "for", "i", "in", "self", "[", "'ports'", "]", "if", "i", "[", "0", "]", "==", "for_protocol", ")", "port", "=", "None", "if", "len", "(", "rv", ")", ">=", "2", ":", "try", ":", "port", "=", "int", "(", "rv", "[", "1", ":", "]", ")", "except", ":", "pass", "port", "=", "port", "or", "DEFAULT_PORTS", "[", "for_protocol", "]", "return", "self", "[", "'hostname'", "]", ",", "port", ",", "use_ssl" ]
Return (hostname, port number, ssl) pair for the protocol. Assuming only one port per host.
[ "Return", "(", "hostname", "port", "number", "ssl", ")", "pair", "for", "the", "protocol", ".", "Assuming", "only", "one", "port", "per", "host", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/svr_info.py#L115-L136
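A small worked example of the (hostname, port, ssl) tuple described above; the host is a placeholder and the space-separated port spec follows the format described for add_single() later in this file.

from connectrum.svr_info import ServerInfo

svr = ServerInfo('example', 'electrum.example.com', 's50002 t')

# Explicit port for SSL ('s') ...
host, port, use_ssl = svr.get_port('s')
print(host, port, use_ssl)      # electrum.example.com 50002 True

# ... and the DEFAULT_PORTS fallback for plain TCP ('t'), with ssl False.
print(svr.get_port('t'))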
coinkite/connectrum
connectrum/svr_info.py
KnownServers.from_json
def from_json(self, fname): ''' Read contents of a CSV containing a list of servers. ''' with open(fname, 'rt') as fp: for row in json.load(fp): nn = ServerInfo.from_dict(row) self[str(nn)] = nn
python
def from_json(self, fname): with open(fname, 'rt') as fp: for row in json.load(fp): nn = ServerInfo.from_dict(row) self[str(nn)] = nn
[ "def", "from_json", "(", "self", ",", "fname", ")", ":", "with", "open", "(", "fname", ",", "'rt'", ")", "as", "fp", ":", "for", "row", "in", "json", ".", "load", "(", "fp", ")", ":", "nn", "=", "ServerInfo", ".", "from_dict", "(", "row", ")", "self", "[", "str", "(", "nn", ")", "]", "=", "nn" ]
Read contents of a JSON file containing a list of servers.
[ "Read", "contents", "of", "a", "CSV", "containing", "a", "list", "of", "servers", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/svr_info.py#L171-L178
coinkite/connectrum
connectrum/svr_info.py
KnownServers.from_irc
def from_irc(self, irc_nickname=None, irc_password=None): ''' Connect to the IRC channel and find all servers presently connected. Slow; takes 30+ seconds but authoritative and current. OBSOLETE. ''' if have_bottom: from .findall import IrcListener # connect and fetch current set of servers who are # on #electrum channel at freenode bot = IrcListener(irc_nickname=irc_nickname, irc_password=irc_password) results = bot.loop.run_until_complete(bot.collect_data()) bot.loop.close() # merge by nick name self.update(results) else: return(False)
python
def from_irc(self, irc_nickname=None, irc_password=None): if have_bottom: from .findall import IrcListener bot = IrcListener(irc_nickname=irc_nickname, irc_password=irc_password) results = bot.loop.run_until_complete(bot.collect_data()) bot.loop.close() self.update(results) else: return(False)
[ "def", "from_irc", "(", "self", ",", "irc_nickname", "=", "None", ",", "irc_password", "=", "None", ")", ":", "if", "have_bottom", ":", "from", ".", "findall", "import", "IrcListener", "# connect and fetch current set of servers who are", "# on #electrum channel at freenode", "bot", "=", "IrcListener", "(", "irc_nickname", "=", "irc_nickname", ",", "irc_password", "=", "irc_password", ")", "results", "=", "bot", ".", "loop", ".", "run_until_complete", "(", "bot", ".", "collect_data", "(", ")", ")", "bot", ".", "loop", ".", "close", "(", ")", "# merge by nick name", "self", ".", "update", "(", "results", ")", "else", ":", "return", "(", "False", ")" ]
Connect to the IRC channel and find all servers presently connected. Slow; takes 30+ seconds but authoritative and current. OBSOLETE.
[ "Connect", "to", "the", "IRC", "channel", "and", "find", "all", "servers", "presently", "connected", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/svr_info.py#L180-L201
coinkite/connectrum
connectrum/svr_info.py
KnownServers.add_single
def add_single(self, hostname, ports, nickname=None, **kws): ''' Explicitly add a single entry. Hostname is a FQDN and ports is either a single int (assumed to be TCP port) or Electrum protocol/port number specification with spaces in between. ''' nickname = nickname or hostname self[hostname.lower()] = ServerInfo(nickname, hostname, ports, **kws)
python
def add_single(self, hostname, ports, nickname=None, **kws): nickname = nickname or hostname self[hostname.lower()] = ServerInfo(nickname, hostname, ports, **kws)
[ "def", "add_single", "(", "self", ",", "hostname", ",", "ports", ",", "nickname", "=", "None", ",", "*", "*", "kws", ")", ":", "nickname", "=", "nickname", "or", "hostname", "self", "[", "hostname", ".", "lower", "(", ")", "]", "=", "ServerInfo", "(", "nickname", ",", "hostname", ",", "ports", ",", "*", "*", "kws", ")" ]
Explicitly add a single entry. Hostname is a FQDN and ports is either a single int (assumed to be TCP port) or Electrum protocol/port number specification with spaces in between.
[ "Explicitly", "add", "a", "single", "entry", ".", "Hostname", "is", "a", "FQDN", "and", "ports", "is", "either", "a", "single", "int", "(", "assumed", "to", "be", "TCP", "port", ")", "or", "Electrum", "protocol", "/", "port", "number", "specification", "with", "spaces", "in", "between", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/svr_info.py#L203-L211
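A hedged sketch of building a small KnownServers set by hand with add_single(); hostnames are placeholders, and the two calls show the bare-int-port and protocol/port-spec forms described above.

from connectrum.svr_info import KnownServers

ks = KnownServers()

ks.add_single('electrum.example.com', 's50002 t50001')          # spec string
ks.add_single('backup.example.net', 50001, nickname='backup')   # bare TCP port

# KnownServers is keyed by lower-cased hostname.
for info in ks.values():
    print(info.get_port('t'))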
coinkite/connectrum
connectrum/svr_info.py
KnownServers.save_json
def save_json(self, fname='servers.json'): ''' Write out to a CSV file. ''' rows = sorted(self.keys()) with open(fname, 'wt') as fp: json.dump([self[k] for k in rows], fp, indent=1)
python
def save_json(self, fname='servers.json'): rows = sorted(self.keys()) with open(fname, 'wt') as fp: json.dump([self[k] for k in rows], fp, indent=1)
[ "def", "save_json", "(", "self", ",", "fname", "=", "'servers.json'", ")", ":", "rows", "=", "sorted", "(", "self", ".", "keys", "(", ")", ")", "with", "open", "(", "fname", ",", "'wt'", ")", "as", "fp", ":", "json", ".", "dump", "(", "[", "self", "[", "k", "]", "for", "k", "in", "rows", "]", ",", "fp", ",", "indent", "=", "1", ")" ]
Write out to a JSON file.
[ "Write", "out", "to", "a", "CSV", "file", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/svr_info.py#L238-L244
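A round-trip sketch pairing save_json() above with from_json() from earlier in this file; the file name and server entry are arbitrary.

from connectrum.svr_info import KnownServers

ks = KnownServers()
ks.add_single('electrum.example.com', 's50002 t50001')

ks.save_json('servers.json')          # write the current set out

restored = KnownServers()
restored.from_json('servers.json')    # load it back into a fresh instance
print(len(restored), 'server(s) restored')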
coinkite/connectrum
connectrum/svr_info.py
KnownServers.select
def select(self, **kws): ''' Find all servers with indicated protocol support. Shuffled. Filter by TOR support, and pruning level. ''' lst = [i for i in self.values() if i.select(**kws)] random.shuffle(lst) return lst
python
def select(self, **kws): lst = [i for i in self.values() if i.select(**kws)] random.shuffle(lst) return lst
[ "def", "select", "(", "self", ",", "*", "*", "kws", ")", ":", "lst", "=", "[", "i", "for", "i", "in", "self", ".", "values", "(", ")", "if", "i", ".", "select", "(", "*", "*", "kws", ")", "]", "random", ".", "shuffle", "(", "lst", ")", "return", "lst" ]
Find all servers with indicated protocol support. Shuffled. Filter by TOR support, and pruning level.
[ "Find", "all", "servers", "with", "indicated", "protocol", "support", ".", "Shuffled", "." ]
train
https://github.com/coinkite/connectrum/blob/99948f92cc5c3ecb1a8a70146294014e608e50fc/connectrum/svr_info.py#L249-L259
mitodl/pylti
pylti/common.py
_post_patched_request
def _post_patched_request(consumers, lti_key, body, url, method, content_type): """ Authorization header needs to be capitalized for some LTI clients this function ensures that header is capitalized :param body: body of the call :param client: OAuth Client :param url: outcome url :return: response """ # pylint: disable=too-many-locals, too-many-arguments oauth_server = LTIOAuthServer(consumers) oauth_server.add_signature_method(SignatureMethod_HMAC_SHA1_Unicode()) lti_consumer = oauth_server.lookup_consumer(lti_key) lti_cert = oauth_server.lookup_cert(lti_key) secret = lti_consumer.secret consumer = oauth2.Consumer(key=lti_key, secret=secret) client = oauth2.Client(consumer) if lti_cert: client.add_certificate(key=lti_cert, cert=lti_cert, domain='') log.debug("cert %s", lti_cert) import httplib2 http = httplib2.Http # pylint: disable=protected-access normalize = http._normalize_headers def my_normalize(self, headers): """ This function patches Authorization header """ ret = normalize(self, headers) if 'authorization' in ret: ret['Authorization'] = ret.pop('authorization') log.debug("headers") log.debug(headers) return ret http._normalize_headers = my_normalize monkey_patch_function = normalize response, content = client.request( url, method, body=body.encode('utf-8'), headers={'Content-Type': content_type}) http = httplib2.Http # pylint: disable=protected-access http._normalize_headers = monkey_patch_function log.debug("key %s", lti_key) log.debug("secret %s", secret) log.debug("url %s", url) log.debug("response %s", response) log.debug("content %s", format(content)) return response, content
python
def _post_patched_request(consumers, lti_key, body, url, method, content_type): oauth_server = LTIOAuthServer(consumers) oauth_server.add_signature_method(SignatureMethod_HMAC_SHA1_Unicode()) lti_consumer = oauth_server.lookup_consumer(lti_key) lti_cert = oauth_server.lookup_cert(lti_key) secret = lti_consumer.secret consumer = oauth2.Consumer(key=lti_key, secret=secret) client = oauth2.Client(consumer) if lti_cert: client.add_certificate(key=lti_cert, cert=lti_cert, domain='') log.debug("cert %s", lti_cert) import httplib2 http = httplib2.Http normalize = http._normalize_headers def my_normalize(self, headers): ret = normalize(self, headers) if 'authorization' in ret: ret['Authorization'] = ret.pop('authorization') log.debug("headers") log.debug(headers) return ret http._normalize_headers = my_normalize monkey_patch_function = normalize response, content = client.request( url, method, body=body.encode('utf-8'), headers={'Content-Type': content_type}) http = httplib2.Http http._normalize_headers = monkey_patch_function log.debug("key %s", lti_key) log.debug("secret %s", secret) log.debug("url %s", url) log.debug("response %s", response) log.debug("content %s", format(content)) return response, content
[ "def", "_post_patched_request", "(", "consumers", ",", "lti_key", ",", "body", ",", "url", ",", "method", ",", "content_type", ")", ":", "# pylint: disable=too-many-locals, too-many-arguments", "oauth_server", "=", "LTIOAuthServer", "(", "consumers", ")", "oauth_server", ".", "add_signature_method", "(", "SignatureMethod_HMAC_SHA1_Unicode", "(", ")", ")", "lti_consumer", "=", "oauth_server", ".", "lookup_consumer", "(", "lti_key", ")", "lti_cert", "=", "oauth_server", ".", "lookup_cert", "(", "lti_key", ")", "secret", "=", "lti_consumer", ".", "secret", "consumer", "=", "oauth2", ".", "Consumer", "(", "key", "=", "lti_key", ",", "secret", "=", "secret", ")", "client", "=", "oauth2", ".", "Client", "(", "consumer", ")", "if", "lti_cert", ":", "client", ".", "add_certificate", "(", "key", "=", "lti_cert", ",", "cert", "=", "lti_cert", ",", "domain", "=", "''", ")", "log", ".", "debug", "(", "\"cert %s\"", ",", "lti_cert", ")", "import", "httplib2", "http", "=", "httplib2", ".", "Http", "# pylint: disable=protected-access", "normalize", "=", "http", ".", "_normalize_headers", "def", "my_normalize", "(", "self", ",", "headers", ")", ":", "\"\"\" This function patches Authorization header \"\"\"", "ret", "=", "normalize", "(", "self", ",", "headers", ")", "if", "'authorization'", "in", "ret", ":", "ret", "[", "'Authorization'", "]", "=", "ret", ".", "pop", "(", "'authorization'", ")", "log", ".", "debug", "(", "\"headers\"", ")", "log", ".", "debug", "(", "headers", ")", "return", "ret", "http", ".", "_normalize_headers", "=", "my_normalize", "monkey_patch_function", "=", "normalize", "response", ",", "content", "=", "client", ".", "request", "(", "url", ",", "method", ",", "body", "=", "body", ".", "encode", "(", "'utf-8'", ")", ",", "headers", "=", "{", "'Content-Type'", ":", "content_type", "}", ")", "http", "=", "httplib2", ".", "Http", "# pylint: disable=protected-access", "http", ".", "_normalize_headers", "=", "monkey_patch_function", "log", ".", "debug", "(", "\"key %s\"", ",", "lti_key", ")", "log", ".", "debug", "(", "\"secret %s\"", ",", "secret", ")", "log", ".", "debug", "(", "\"url %s\"", ",", "url", ")", "log", ".", "debug", "(", "\"response %s\"", ",", "response", ")", "log", ".", "debug", "(", "\"content %s\"", ",", "format", "(", "content", ")", ")", "return", "response", ",", "content" ]
The Authorization header needs to be capitalized for some LTI clients; this function ensures that the header is capitalized. :param body: body of the call :param client: OAuth Client :param url: outcome url :return: response
[ "Authorization", "header", "needs", "to", "be", "capitalized", "for", "some", "LTI", "clients", "this", "function", "ensures", "that", "header", "is", "capitalized" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L145-L203
mitodl/pylti
pylti/common.py
post_message
def post_message(consumers, lti_key, url, body): """ Posts a signed message to LTI consumer :param consumers: consumers from config :param lti_key: key to find appropriate consumer :param url: post url :param body: xml body :return: success """ content_type = 'application/xml' method = 'POST' (_, content) = _post_patched_request( consumers, lti_key, body, url, method, content_type, ) is_success = b"<imsx_codeMajor>success</imsx_codeMajor>" in content log.debug("is success %s", is_success) return is_success
python
def post_message(consumers, lti_key, url, body): content_type = 'application/xml' method = 'POST' (_, content) = _post_patched_request( consumers, lti_key, body, url, method, content_type, ) is_success = b"<imsx_codeMajor>success</imsx_codeMajor>" in content log.debug("is success %s", is_success) return is_success
[ "def", "post_message", "(", "consumers", ",", "lti_key", ",", "url", ",", "body", ")", ":", "content_type", "=", "'application/xml'", "method", "=", "'POST'", "(", "_", ",", "content", ")", "=", "_post_patched_request", "(", "consumers", ",", "lti_key", ",", "body", ",", "url", ",", "method", ",", "content_type", ",", ")", "is_success", "=", "b\"<imsx_codeMajor>success</imsx_codeMajor>\"", "in", "content", "log", ".", "debug", "(", "\"is success %s\"", ",", "is_success", ")", "return", "is_success" ]
Posts a signed message to LTI consumer :param consumers: consumers from config :param lti_key: key to find appropriate consumer :param url: post url :param body: xml body :return: success
[ "Posts", "a", "signed", "message", "to", "LTI", "consumer" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L206-L229
mitodl/pylti
pylti/common.py
post_message2
def post_message2(consumers, lti_key, url, body, method='POST', content_type='application/xml'): """ Posts a signed message to LTI consumer using LTI 2.0 format :param: consumers: consumers from config :param: lti_key: key to find appropriate consumer :param: url: post url :param: body: xml body :return: success """ # pylint: disable=too-many-arguments (response, _) = _post_patched_request( consumers, lti_key, body, url, method, content_type, ) is_success = response.status == 200 log.debug("is success %s", is_success) return is_success
python
def post_message2(consumers, lti_key, url, body, method='POST', content_type='application/xml'): (response, _) = _post_patched_request( consumers, lti_key, body, url, method, content_type, ) is_success = response.status == 200 log.debug("is success %s", is_success) return is_success
[ "def", "post_message2", "(", "consumers", ",", "lti_key", ",", "url", ",", "body", ",", "method", "=", "'POST'", ",", "content_type", "=", "'application/xml'", ")", ":", "# pylint: disable=too-many-arguments", "(", "response", ",", "_", ")", "=", "_post_patched_request", "(", "consumers", ",", "lti_key", ",", "body", ",", "url", ",", "method", ",", "content_type", ",", ")", "is_success", "=", "response", ".", "status", "==", "200", "log", ".", "debug", "(", "\"is success %s\"", ",", "is_success", ")", "return", "is_success" ]
Posts a signed message to LTI consumer using LTI 2.0 format :param consumers: consumers from config :param lti_key: key to find appropriate consumer :param url: post url :param body: xml body :return: success
[ "Posts", "a", "signed", "message", "to", "LTI", "consumer", "using", "LTI", "2", ".", "0", "format" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L232-L256
mitodl/pylti
pylti/common.py
verify_request_common
def verify_request_common(consumers, url, method, headers, params): """ Verifies that request is valid :param consumers: consumers from config file :param url: request url :param method: request method :param headers: request headers :param params: request params :return: is request valid """ log.debug("consumers %s", consumers) log.debug("url %s", url) log.debug("method %s", method) log.debug("headers %s", headers) log.debug("params %s", params) oauth_server = LTIOAuthServer(consumers) oauth_server.add_signature_method( SignatureMethod_PLAINTEXT_Unicode()) oauth_server.add_signature_method( SignatureMethod_HMAC_SHA1_Unicode()) # Check header for SSL before selecting the url if headers.get('X-Forwarded-Proto', 'http') == 'https': url = url.replace('http:', 'https:', 1) oauth_request = Request_Fix_Duplicate.from_request( method, url, headers=dict(headers), parameters=params ) if not oauth_request: log.info('Received non oauth request on oauth protected page') raise LTIException('This page requires a valid oauth session ' 'or request') try: # pylint: disable=protected-access oauth_consumer_key = oauth_request.get_parameter('oauth_consumer_key') consumer = oauth_server.lookup_consumer(oauth_consumer_key) if not consumer: raise oauth2.Error('Invalid consumer.') oauth_server.verify_request(oauth_request, consumer, None) except oauth2.Error: # Rethrow our own for nice error handling (don't print # error message as it will contain the key raise LTIException("OAuth error: Please check your key and secret") return True
python
def verify_request_common(consumers, url, method, headers, params): log.debug("consumers %s", consumers) log.debug("url %s", url) log.debug("method %s", method) log.debug("headers %s", headers) log.debug("params %s", params) oauth_server = LTIOAuthServer(consumers) oauth_server.add_signature_method( SignatureMethod_PLAINTEXT_Unicode()) oauth_server.add_signature_method( SignatureMethod_HMAC_SHA1_Unicode()) if headers.get('X-Forwarded-Proto', 'http') == 'https': url = url.replace('http:', 'https:', 1) oauth_request = Request_Fix_Duplicate.from_request( method, url, headers=dict(headers), parameters=params ) if not oauth_request: log.info('Received non oauth request on oauth protected page') raise LTIException('This page requires a valid oauth session ' 'or request') try: oauth_consumer_key = oauth_request.get_parameter('oauth_consumer_key') consumer = oauth_server.lookup_consumer(oauth_consumer_key) if not consumer: raise oauth2.Error('Invalid consumer.') oauth_server.verify_request(oauth_request, consumer, None) except oauth2.Error: raise LTIException("OAuth error: Please check your key and secret") return True
[ "def", "verify_request_common", "(", "consumers", ",", "url", ",", "method", ",", "headers", ",", "params", ")", ":", "log", ".", "debug", "(", "\"consumers %s\"", ",", "consumers", ")", "log", ".", "debug", "(", "\"url %s\"", ",", "url", ")", "log", ".", "debug", "(", "\"method %s\"", ",", "method", ")", "log", ".", "debug", "(", "\"headers %s\"", ",", "headers", ")", "log", ".", "debug", "(", "\"params %s\"", ",", "params", ")", "oauth_server", "=", "LTIOAuthServer", "(", "consumers", ")", "oauth_server", ".", "add_signature_method", "(", "SignatureMethod_PLAINTEXT_Unicode", "(", ")", ")", "oauth_server", ".", "add_signature_method", "(", "SignatureMethod_HMAC_SHA1_Unicode", "(", ")", ")", "# Check header for SSL before selecting the url", "if", "headers", ".", "get", "(", "'X-Forwarded-Proto'", ",", "'http'", ")", "==", "'https'", ":", "url", "=", "url", ".", "replace", "(", "'http:'", ",", "'https:'", ",", "1", ")", "oauth_request", "=", "Request_Fix_Duplicate", ".", "from_request", "(", "method", ",", "url", ",", "headers", "=", "dict", "(", "headers", ")", ",", "parameters", "=", "params", ")", "if", "not", "oauth_request", ":", "log", ".", "info", "(", "'Received non oauth request on oauth protected page'", ")", "raise", "LTIException", "(", "'This page requires a valid oauth session '", "'or request'", ")", "try", ":", "# pylint: disable=protected-access", "oauth_consumer_key", "=", "oauth_request", ".", "get_parameter", "(", "'oauth_consumer_key'", ")", "consumer", "=", "oauth_server", ".", "lookup_consumer", "(", "oauth_consumer_key", ")", "if", "not", "consumer", ":", "raise", "oauth2", ".", "Error", "(", "'Invalid consumer.'", ")", "oauth_server", ".", "verify_request", "(", "oauth_request", ",", "consumer", ",", "None", ")", "except", "oauth2", ".", "Error", ":", "# Rethrow our own for nice error handling (don't print", "# error message as it will contain the key", "raise", "LTIException", "(", "\"OAuth error: Please check your key and secret\"", ")", "return", "True" ]
Verifies that request is valid :param consumers: consumers from config file :param url: request url :param method: request method :param headers: request headers :param params: request params :return: is request valid
[ "Verifies", "that", "request", "is", "valid" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L259-L308
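A hedged sketch of calling verify_request_common() from a web handler. The consumers mapping shape (key -> {'secret': ...}) follows what lookup_consumer() below expects; the request object is Flask-style and purely illustrative.

from pylti.common import LTIException, verify_request_common

# Consumer key/secret pairs, the structure lookup_consumer() reads.
CONSUMERS = {
    'my_consumer_key': {'secret': 'my_shared_secret'},
}

def handle_lti_launch(request):
    # `request` is assumed Flask-like (url, method, headers, form).
    try:
        verify_request_common(
            CONSUMERS,
            request.url,             # launch URL as the app sees it
            request.method,          # normally 'POST' for launches
            dict(request.headers),   # X-Forwarded-Proto is honoured for SSL
            dict(request.form),      # the signed LTI/OAuth parameters
        )
    except LTIException:
        return False                 # bad or missing OAuth signature
    return True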
mitodl/pylti
pylti/common.py
generate_request_xml
def generate_request_xml(message_identifier_id, operation, lis_result_sourcedid, score): # pylint: disable=too-many-locals """ Generates LTI 1.1 XML for posting result to LTI consumer. :param message_identifier_id: :param operation: :param lis_result_sourcedid: :param score: :return: XML string """ root = etree.Element(u'imsx_POXEnvelopeRequest', xmlns=u'http://www.imsglobal.org/services/' u'ltiv1p1/xsd/imsoms_v1p0') header = etree.SubElement(root, 'imsx_POXHeader') header_info = etree.SubElement(header, 'imsx_POXRequestHeaderInfo') version = etree.SubElement(header_info, 'imsx_version') version.text = 'V1.0' message_identifier = etree.SubElement(header_info, 'imsx_messageIdentifier') message_identifier.text = message_identifier_id body = etree.SubElement(root, 'imsx_POXBody') xml_request = etree.SubElement(body, '%s%s' % (operation, 'Request')) record = etree.SubElement(xml_request, 'resultRecord') guid = etree.SubElement(record, 'sourcedGUID') sourcedid = etree.SubElement(guid, 'sourcedId') sourcedid.text = lis_result_sourcedid if score is not None: result = etree.SubElement(record, 'result') result_score = etree.SubElement(result, 'resultScore') language = etree.SubElement(result_score, 'language') language.text = 'en' text_string = etree.SubElement(result_score, 'textString') text_string.text = score.__str__() ret = "<?xml version='1.0' encoding='utf-8'?>\n{}".format( etree.tostring(root, encoding='utf-8').decode('utf-8')) log.debug("XML Response: \n%s", ret) return ret
python
def generate_request_xml(message_identifier_id, operation, lis_result_sourcedid, score): root = etree.Element(u'imsx_POXEnvelopeRequest', xmlns=u'http://www.imsglobal.org/services/' u'ltiv1p1/xsd/imsoms_v1p0') header = etree.SubElement(root, 'imsx_POXHeader') header_info = etree.SubElement(header, 'imsx_POXRequestHeaderInfo') version = etree.SubElement(header_info, 'imsx_version') version.text = 'V1.0' message_identifier = etree.SubElement(header_info, 'imsx_messageIdentifier') message_identifier.text = message_identifier_id body = etree.SubElement(root, 'imsx_POXBody') xml_request = etree.SubElement(body, '%s%s' % (operation, 'Request')) record = etree.SubElement(xml_request, 'resultRecord') guid = etree.SubElement(record, 'sourcedGUID') sourcedid = etree.SubElement(guid, 'sourcedId') sourcedid.text = lis_result_sourcedid if score is not None: result = etree.SubElement(record, 'result') result_score = etree.SubElement(result, 'resultScore') language = etree.SubElement(result_score, 'language') language.text = 'en' text_string = etree.SubElement(result_score, 'textString') text_string.text = score.__str__() ret = "<?xml version='1.0' encoding='utf-8'?>\n{}".format( etree.tostring(root, encoding='utf-8').decode('utf-8')) log.debug("XML Response: \n%s", ret) return ret
[ "def", "generate_request_xml", "(", "message_identifier_id", ",", "operation", ",", "lis_result_sourcedid", ",", "score", ")", ":", "# pylint: disable=too-many-locals", "root", "=", "etree", ".", "Element", "(", "u'imsx_POXEnvelopeRequest'", ",", "xmlns", "=", "u'http://www.imsglobal.org/services/'", "u'ltiv1p1/xsd/imsoms_v1p0'", ")", "header", "=", "etree", ".", "SubElement", "(", "root", ",", "'imsx_POXHeader'", ")", "header_info", "=", "etree", ".", "SubElement", "(", "header", ",", "'imsx_POXRequestHeaderInfo'", ")", "version", "=", "etree", ".", "SubElement", "(", "header_info", ",", "'imsx_version'", ")", "version", ".", "text", "=", "'V1.0'", "message_identifier", "=", "etree", ".", "SubElement", "(", "header_info", ",", "'imsx_messageIdentifier'", ")", "message_identifier", ".", "text", "=", "message_identifier_id", "body", "=", "etree", ".", "SubElement", "(", "root", ",", "'imsx_POXBody'", ")", "xml_request", "=", "etree", ".", "SubElement", "(", "body", ",", "'%s%s'", "%", "(", "operation", ",", "'Request'", ")", ")", "record", "=", "etree", ".", "SubElement", "(", "xml_request", ",", "'resultRecord'", ")", "guid", "=", "etree", ".", "SubElement", "(", "record", ",", "'sourcedGUID'", ")", "sourcedid", "=", "etree", ".", "SubElement", "(", "guid", ",", "'sourcedId'", ")", "sourcedid", ".", "text", "=", "lis_result_sourcedid", "if", "score", "is", "not", "None", ":", "result", "=", "etree", ".", "SubElement", "(", "record", ",", "'result'", ")", "result_score", "=", "etree", ".", "SubElement", "(", "result", ",", "'resultScore'", ")", "language", "=", "etree", ".", "SubElement", "(", "result_score", ",", "'language'", ")", "language", ".", "text", "=", "'en'", "text_string", "=", "etree", ".", "SubElement", "(", "result_score", ",", "'textString'", ")", "text_string", ".", "text", "=", "score", ".", "__str__", "(", ")", "ret", "=", "\"<?xml version='1.0' encoding='utf-8'?>\\n{}\"", ".", "format", "(", "etree", ".", "tostring", "(", "root", ",", "encoding", "=", "'utf-8'", ")", ".", "decode", "(", "'utf-8'", ")", ")", "log", ".", "debug", "(", "\"XML Response: \\n%s\"", ",", "ret", ")", "return", "ret" ]
Generates LTI 1.1 XML for posting result to LTI consumer. :param message_identifier_id: :param operation: :param lis_result_sourcedid: :param score: :return: XML string
[ "Generates", "LTI", "1", ".", "1", "XML", "for", "posting", "result", "to", "LTI", "consumer", "." ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L311-L353
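Putting generate_request_xml() together with post_message() from earlier in this file to send a grade back to the consumer. 'replaceResult' is the standard LTI 1.1 Basic Outcomes operation; the key, secret, outcome URL and sourcedid are placeholders.

import uuid

from pylti.common import generate_request_xml, post_message

CONSUMERS = {
    'my_consumer_key': {'secret': 'my_shared_secret'},
}

# Normally taken from the launch parameters of the LTI session.
outcome_url = 'https://lms.example.com/grade_passback'
sourcedid = 'course-v1:demo+101:user42'

xml = generate_request_xml(
    message_identifier_id=str(uuid.uuid4()),
    operation='replaceResult',     # builds a replaceResultRequest element
    lis_result_sourcedid=sourcedid,
    score=0.85,                    # 0.0 - 1.0
)

ok = post_message(CONSUMERS, 'my_consumer_key', outcome_url, xml)
print('grade accepted' if ok else 'grade rejected')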
mitodl/pylti
pylti/common.py
LTIOAuthServer.lookup_consumer
def lookup_consumer(self, key): """ Search through keys """ if not self.consumers: log.critical(("No consumers defined in settings." "Have you created a configuration file?")) return None consumer = self.consumers.get(key) if not consumer: log.info("Did not find consumer, using key: %s ", key) return None secret = consumer.get('secret', None) if not secret: log.critical(('Consumer %s, is missing secret' 'in settings file, and needs correction.'), key) return None return oauth2.Consumer(key, secret)
python
def lookup_consumer(self, key): if not self.consumers: log.critical(("No consumers defined in settings." "Have you created a configuration file?")) return None consumer = self.consumers.get(key) if not consumer: log.info("Did not find consumer, using key: %s ", key) return None secret = consumer.get('secret', None) if not secret: log.critical(('Consumer %s, is missing secret' 'in settings file, and needs correction.'), key) return None return oauth2.Consumer(key, secret)
[ "def", "lookup_consumer", "(", "self", ",", "key", ")", ":", "if", "not", "self", ".", "consumers", ":", "log", ".", "critical", "(", "(", "\"No consumers defined in settings.\"", "\"Have you created a configuration file?\"", ")", ")", "return", "None", "consumer", "=", "self", ".", "consumers", ".", "get", "(", "key", ")", "if", "not", "consumer", ":", "log", ".", "info", "(", "\"Did not find consumer, using key: %s \"", ",", "key", ")", "return", "None", "secret", "=", "consumer", ".", "get", "(", "'secret'", ",", "None", ")", "if", "not", "secret", ":", "log", ".", "critical", "(", "(", "'Consumer %s, is missing secret'", "'in settings file, and needs correction.'", ")", ",", "key", ")", "return", "None", "return", "oauth2", ".", "Consumer", "(", "key", ",", "secret", ")" ]
Search through keys
[ "Search", "through", "keys" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L75-L94
mitodl/pylti
pylti/common.py
LTIOAuthServer.lookup_cert
def lookup_cert(self, key): """ Search through keys """ if not self.consumers: log.critical(("No consumers defined in settings." "Have you created a configuration file?")) return None consumer = self.consumers.get(key) if not consumer: log.info("Did not find consumer, using key: %s ", key) return None cert = consumer.get('cert', None) return cert
python
def lookup_cert(self, key): if not self.consumers: log.critical(("No consumers defined in settings." "Have you created a configuration file?")) return None consumer = self.consumers.get(key) if not consumer: log.info("Did not find consumer, using key: %s ", key) return None cert = consumer.get('cert', None) return cert
[ "def", "lookup_cert", "(", "self", ",", "key", ")", ":", "if", "not", "self", ".", "consumers", ":", "log", ".", "critical", "(", "(", "\"No consumers defined in settings.\"", "\"Have you created a configuration file?\"", ")", ")", "return", "None", "consumer", "=", "self", ".", "consumers", ".", "get", "(", "key", ")", "if", "not", "consumer", ":", "log", ".", "info", "(", "\"Did not find consumer, using key: %s \"", ",", "key", ")", "return", "None", "cert", "=", "consumer", ".", "get", "(", "'cert'", ",", "None", ")", "return", "cert" ]
Search through keys
[ "Search", "through", "keys" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L96-L110
mitodl/pylti
pylti/common.py
SignatureMethod_HMAC_SHA1_Unicode.check
def check(self, request, consumer, token, signature): """ Returns whether the given signature is the correct signature for the given consumer and token signing the given request. """ built = self.sign(request, consumer, token) if isinstance(signature, STRING_TYPES): signature = signature.encode("utf8") return built == signature
python
def check(self, request, consumer, token, signature): built = self.sign(request, consumer, token) if isinstance(signature, STRING_TYPES): signature = signature.encode("utf8") return built == signature
[ "def", "check", "(", "self", ",", "request", ",", "consumer", ",", "token", ",", "signature", ")", ":", "built", "=", "self", ".", "sign", "(", "request", ",", "consumer", ",", "token", ")", "if", "isinstance", "(", "signature", ",", "STRING_TYPES", ")", ":", "signature", "=", "signature", ".", "encode", "(", "\"utf8\"", ")", "return", "built", "==", "signature" ]
Returns whether the given signature is the correct signature for the given consumer and token signing the given request.
[ "Returns", "whether", "the", "given", "signature", "is", "the", "correct", "signature", "for", "the", "given", "consumer", "and", "token", "signing", "the", "given", "request", "." ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L364-L372
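The encode step is the whole point of the override: under Python 3 sign() returns bytes, and comparing bytes against an incoming text signature is always False. A tiny illustration with a made-up signature value:

built = b"dGVzdA=="        # pretend this is what sign() produced (bytes under Python 3)
incoming = "dGVzdA=="      # the signature usually arrives as text

print(built == incoming)                 # False: bytes never equal str
print(built == incoming.encode("utf8"))  # True once the text is encoded first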
mitodl/pylti
pylti/common.py
LTIBase.name
def name(self): # pylint: disable=no-self-use """ Name returns user's name or user's email or user_id :return: best guess of name to use to greet user """ if 'lis_person_sourcedid' in self.session: return self.session['lis_person_sourcedid'] elif 'lis_person_contact_email_primary' in self.session: return self.session['lis_person_contact_email_primary'] elif 'user_id' in self.session: return self.session['user_id'] else: return ''
python
def name(self): if 'lis_person_sourcedid' in self.session: return self.session['lis_person_sourcedid'] elif 'lis_person_contact_email_primary' in self.session: return self.session['lis_person_contact_email_primary'] elif 'user_id' in self.session: return self.session['user_id'] else: return ''
[ "def", "name", "(", "self", ")", ":", "# pylint: disable=no-self-use", "if", "'lis_person_sourcedid'", "in", "self", ".", "session", ":", "return", "self", ".", "session", "[", "'lis_person_sourcedid'", "]", "elif", "'lis_person_contact_email_primary'", "in", "self", ".", "session", ":", "return", "self", ".", "session", "[", "'lis_person_contact_email_primary'", "]", "elif", "'user_id'", "in", "self", ".", "session", ":", "return", "self", ".", "session", "[", "'user_id'", "]", "else", ":", "return", "''" ]
Name returns user's name or user's email or user_id :return: best guess of name to use to greet user
[ "Name", "returns", "user", "s", "name", "or", "user", "s", "email", "or", "user_id", ":", "return", ":", "best", "guess", "of", "name", "to", "use", "to", "greet", "user" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L473-L485
mitodl/pylti
pylti/common.py
LTIBase.verify
def verify(self): """ Verify if LTI request is valid, validation depends on @lti wrapper arguments :raises: LTIException """ log.debug('verify request=%s', self.lti_kwargs.get('request')) if self.lti_kwargs.get('request') == 'session': self._verify_session() elif self.lti_kwargs.get('request') == 'initial': self.verify_request() elif self.lti_kwargs.get('request') == 'any': self._verify_any() else: raise LTIException("Unknown request type") return True
python
def verify(self): log.debug('verify request=%s', self.lti_kwargs.get('request')) if self.lti_kwargs.get('request') == 'session': self._verify_session() elif self.lti_kwargs.get('request') == 'initial': self.verify_request() elif self.lti_kwargs.get('request') == 'any': self._verify_any() else: raise LTIException("Unknown request type") return True
[ "def", "verify", "(", "self", ")", ":", "log", ".", "debug", "(", "'verify request=%s'", ",", "self", ".", "lti_kwargs", ".", "get", "(", "'request'", ")", ")", "if", "self", ".", "lti_kwargs", ".", "get", "(", "'request'", ")", "==", "'session'", ":", "self", ".", "_verify_session", "(", ")", "elif", "self", ".", "lti_kwargs", ".", "get", "(", "'request'", ")", "==", "'initial'", ":", "self", ".", "verify_request", "(", ")", "elif", "self", ".", "lti_kwargs", ".", "get", "(", "'request'", ")", "==", "'any'", ":", "self", ".", "_verify_any", "(", ")", "else", ":", "raise", "LTIException", "(", "\"Unknown request type\"", ")", "return", "True" ]
Verify if LTI request is valid, validation depends on @lti wrapper arguments :raises: LTIException
[ "Verify", "if", "LTI", "request", "is", "valid", "validation", "depends", "on", "@lti", "wrapper", "arguments" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L487-L503
mitodl/pylti
pylti/common.py
LTIBase.is_role
def is_role(self, role): """ Verify if user is in role :param: role: role to verify against :return: if user is in role :exception: LTIException if role is unknown """ log.debug("is_role %s", role) roles = self.session['roles'].split(',') if role in LTI_ROLES: role_list = LTI_ROLES[role] # find the intersection of the roles roles = set(role_list) & set(roles) is_user_role_there = len(roles) >= 1 log.debug( "is_role roles_list=%s role=%s in list=%s", role_list, roles, is_user_role_there ) return is_user_role_there else: raise LTIException("Unknown role {}.".format(role))
python
def is_role(self, role): log.debug("is_role %s", role) roles = self.session['roles'].split(',') if role in LTI_ROLES: role_list = LTI_ROLES[role] roles = set(role_list) & set(roles) is_user_role_there = len(roles) >= 1 log.debug( "is_role roles_list=%s role=%s in list=%s", role_list, roles, is_user_role_there ) return is_user_role_there else: raise LTIException("Unknown role {}.".format(role))
[ "def", "is_role", "(", "self", ",", "role", ")", ":", "log", ".", "debug", "(", "\"is_role %s\"", ",", "role", ")", "roles", "=", "self", ".", "session", "[", "'roles'", "]", ".", "split", "(", "','", ")", "if", "role", "in", "LTI_ROLES", ":", "role_list", "=", "LTI_ROLES", "[", "role", "]", "# find the intersection of the roles", "roles", "=", "set", "(", "role_list", ")", "&", "set", "(", "roles", ")", "is_user_role_there", "=", "len", "(", "roles", ")", ">=", "1", "log", ".", "debug", "(", "\"is_role roles_list=%s role=%s in list=%s\"", ",", "role_list", ",", "roles", ",", "is_user_role_there", ")", "return", "is_user_role_there", "else", ":", "raise", "LTIException", "(", "\"Unknown role {}.\"", ".", "format", "(", "role", ")", ")" ]
Verify if user is in role :param: role: role to verify against :return: if user is in role :exception: LTIException if role is unknown
[ "Verify", "if", "user", "is", "in", "role" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L550-L571
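In practice the check reduces to intersecting the comma-separated roles claim with the alias list registered under the requested role name. The alias table below is only illustrative; the real one lives in LTI_ROLES, which is not shown here:

# Illustrative alias table and session value for the "staff" role name.
LTI_ROLES = {"staff": ["Administrator", "Instructor"]}
session_roles = "Instructor,urn:lti:instrole:ims/lis/Student".split(",")

matched = set(LTI_ROLES["staff"]) & set(session_roles)
print(len(matched) >= 1)   # True: "Instructor" alone satisfies the "staff" check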
mitodl/pylti
pylti/common.py
LTIBase._check_role
def _check_role(self): """ Check that user is in role specified as wrapper attribute :exception: LTIRoleException if user is not in roles """ role = u'any' if 'role' in self.lti_kwargs: role = self.lti_kwargs['role'] log.debug( "check_role lti_role=%s decorator_role=%s", self.role, role ) if not (role == u'any' or self.is_role(self, role)): raise LTIRoleException('Not authorized.')
python
def _check_role(self): role = u'any' if 'role' in self.lti_kwargs: role = self.lti_kwargs['role'] log.debug( "check_role lti_role=%s decorator_role=%s", self.role, role ) if not (role == u'any' or self.is_role(self, role)): raise LTIRoleException('Not authorized.')
[ "def", "_check_role", "(", "self", ")", ":", "role", "=", "u'any'", "if", "'role'", "in", "self", ".", "lti_kwargs", ":", "role", "=", "self", ".", "lti_kwargs", "[", "'role'", "]", "log", ".", "debug", "(", "\"check_role lti_role=%s decorator_role=%s\"", ",", "self", ".", "role", ",", "role", ")", "if", "not", "(", "role", "==", "u'any'", "or", "self", ".", "is_role", "(", "self", ",", "role", ")", ")", ":", "raise", "LTIRoleException", "(", "'Not authorized.'", ")" ]
Check that user is in role specified as wrapper attribute :exception: LTIRoleException if user is not in roles
[ "Check", "that", "user", "is", "in", "role", "specified", "as", "wrapper", "attribute" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L573-L586
mitodl/pylti
pylti/common.py
LTIBase.post_grade
def post_grade(self, grade): """ Post grade to LTI consumer using XML :param: grade: 0 <= grade <= 1 :return: True if post successful and grade valid :exception: LTIPostMessageException if call failed """ message_identifier_id = self.message_identifier_id() operation = 'replaceResult' lis_result_sourcedid = self.lis_result_sourcedid # # edX devbox fix score = float(grade) if 0 <= score <= 1.0: xml = generate_request_xml( message_identifier_id, operation, lis_result_sourcedid, score) ret = post_message(self._consumers(), self.key, self.response_url, xml) if not ret: raise LTIPostMessageException("Post Message Failed") return True return False
python
def post_grade(self, grade): message_identifier_id = self.message_identifier_id() operation = 'replaceResult' lis_result_sourcedid = self.lis_result_sourcedid score = float(grade) if 0 <= score <= 1.0: xml = generate_request_xml( message_identifier_id, operation, lis_result_sourcedid, score) ret = post_message(self._consumers(), self.key, self.response_url, xml) if not ret: raise LTIPostMessageException("Post Message Failed") return True return False
[ "def", "post_grade", "(", "self", ",", "grade", ")", ":", "message_identifier_id", "=", "self", ".", "message_identifier_id", "(", ")", "operation", "=", "'replaceResult'", "lis_result_sourcedid", "=", "self", ".", "lis_result_sourcedid", "# # edX devbox fix", "score", "=", "float", "(", "grade", ")", "if", "0", "<=", "score", "<=", "1.0", ":", "xml", "=", "generate_request_xml", "(", "message_identifier_id", ",", "operation", ",", "lis_result_sourcedid", ",", "score", ")", "ret", "=", "post_message", "(", "self", ".", "_consumers", "(", ")", ",", "self", ".", "key", ",", "self", ".", "response_url", ",", "xml", ")", "if", "not", "ret", ":", "raise", "LTIPostMessageException", "(", "\"Post Message Failed\"", ")", "return", "True", "return", "False" ]
Post grade to LTI consumer using XML :param: grade: 0 <= grade <= 1 :return: True if post successful and grade valid :exception: LTIPostMessageException if call failed
[ "Post", "grade", "to", "LTI", "consumer", "using", "XML" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L588-L611
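A typical call site just passes a float in [0, 1] and treats a False return or the raised exception as failure. A hedged sketch, assuming LTIPostMessageException is importable from pylti.common, where it is raised above:

from pylti.common import LTIPostMessageException

def record_score(lti, score):
    """Send a score in [0, 1] back to the consumer and report the outcome."""
    try:
        return "posted" if lti.post_grade(score) else "score out of range"
    except LTIPostMessageException:
        return "outcome service rejected the replaceResult call"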
mitodl/pylti
pylti/common.py
LTIBase.post_grade2
def post_grade2(self, grade, user=None, comment=''): """ Post grade to LTI consumer using REST/JSON URL munging is related to: https://openedx.atlassian.net/browse/PLAT-281 :param: grade: 0 <= grade <= 1 :return: True if post successful and grade valid :exception: LTIPostMessageException if call failed """ content_type = 'application/vnd.ims.lis.v2.result+json' if user is None: user = self.user_id lti2_url = self.response_url.replace( "/grade_handler", "/lti_2_0_result_rest_handler/user/{}".format(user)) score = float(grade) if 0 <= score <= 1.0: body = json.dumps({ "@context": "http://purl.imsglobal.org/ctx/lis/v2/Result", "@type": "Result", "resultScore": score, "comment": comment }) ret = post_message2(self._consumers(), self.key, lti2_url, body, method='PUT', content_type=content_type) if not ret: raise LTIPostMessageException("Post Message Failed") return True return False
python
def post_grade2(self, grade, user=None, comment=''): content_type = 'application/vnd.ims.lis.v2.result+json' if user is None: user = self.user_id lti2_url = self.response_url.replace( "/grade_handler", "/lti_2_0_result_rest_handler/user/{}".format(user)) score = float(grade) if 0 <= score <= 1.0: body = json.dumps({ "@context": "http://purl.imsglobal.org/ctx/lis/v2/Result", "@type": "Result", "resultScore": score, "comment": comment }) ret = post_message2(self._consumers(), self.key, lti2_url, body, method='PUT', content_type=content_type) if not ret: raise LTIPostMessageException("Post Message Failed") return True return False
[ "def", "post_grade2", "(", "self", ",", "grade", ",", "user", "=", "None", ",", "comment", "=", "''", ")", ":", "content_type", "=", "'application/vnd.ims.lis.v2.result+json'", "if", "user", "is", "None", ":", "user", "=", "self", ".", "user_id", "lti2_url", "=", "self", ".", "response_url", ".", "replace", "(", "\"/grade_handler\"", ",", "\"/lti_2_0_result_rest_handler/user/{}\"", ".", "format", "(", "user", ")", ")", "score", "=", "float", "(", "grade", ")", "if", "0", "<=", "score", "<=", "1.0", ":", "body", "=", "json", ".", "dumps", "(", "{", "\"@context\"", ":", "\"http://purl.imsglobal.org/ctx/lis/v2/Result\"", ",", "\"@type\"", ":", "\"Result\"", ",", "\"resultScore\"", ":", "score", ",", "\"comment\"", ":", "comment", "}", ")", "ret", "=", "post_message2", "(", "self", ".", "_consumers", "(", ")", ",", "self", ".", "key", ",", "lti2_url", ",", "body", ",", "method", "=", "'PUT'", ",", "content_type", "=", "content_type", ")", "if", "not", "ret", ":", "raise", "LTIPostMessageException", "(", "\"Post Message Failed\"", ")", "return", "True", "return", "False" ]
Post grade to LTI consumer using REST/JSON URL munging is related to: https://openedx.atlassian.net/browse/PLAT-281 :param: grade: 0 <= grade <= 1 :return: True if post successful and grade valid :exception: LTIPostMessageException if call failed
[ "Post", "grade", "to", "LTI", "consumer", "using", "REST", "/", "JSON", "URL", "munging", "will", "is", "related", "to", ":", "https", ":", "//", "openedx", ".", "atlassian", ".", "net", "/", "browse", "/", "PLAT", "-", "281" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/common.py#L613-L644
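For the LTI 2.0 path the request body is the small JSON-LD document assembled above; rendering it standalone makes the payload easy to compare against a consumer's logs (the score and comment values are arbitrary):

import json

body = json.dumps({
    "@context": "http://purl.imsglobal.org/ctx/lis/v2/Result",
    "@type": "Result",
    "resultScore": 0.85,     # any float in [0, 1]
    "comment": "Nice work",  # arbitrary feedback text
})
# PUT to the lti_2_0_result_rest_handler URL with
# Content-Type: application/vnd.ims.lis.v2.result+json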
mitodl/pylti
pylti/flask.py
lti
def lti(app=None, request='any', error=default_error, role='any', *lti_args, **lti_kwargs): """ LTI decorator :param: app - Flask App object (optional). :py:attr:`flask.current_app` is used if no object is passed in. :param: error - Callback if LTI throws exception (optional). :py:attr:`pylti.flask.default_error` is the default. :param: request - Request type from :py:attr:`pylti.common.LTI_REQUEST_TYPE`. (default: any) :param: roles - LTI Role (default: any) :return: wrapper """ def _lti(function): """ Inner LTI decorator :param: function: :return: """ @wraps(function) def wrapper(*args, **kwargs): """ Pass LTI reference to function or return error. """ try: the_lti = LTI(lti_args, lti_kwargs) the_lti.verify() the_lti._check_role() # pylint: disable=protected-access kwargs['lti'] = the_lti return function(*args, **kwargs) except LTIException as lti_exception: error = lti_kwargs.get('error') exception = dict() exception['exception'] = lti_exception exception['kwargs'] = kwargs exception['args'] = args return error(exception=exception) return wrapper lti_kwargs['request'] = request lti_kwargs['error'] = error lti_kwargs['role'] = role if (not app) or isinstance(app, Flask): lti_kwargs['app'] = app return _lti else: # We are wrapping without arguments lti_kwargs['app'] = None return _lti(app)
python
def lti(app=None, request='any', error=default_error, role='any', *lti_args, **lti_kwargs): def _lti(function): @wraps(function) def wrapper(*args, **kwargs): try: the_lti = LTI(lti_args, lti_kwargs) the_lti.verify() the_lti._check_role() kwargs['lti'] = the_lti return function(*args, **kwargs) except LTIException as lti_exception: error = lti_kwargs.get('error') exception = dict() exception['exception'] = lti_exception exception['kwargs'] = kwargs exception['args'] = args return error(exception=exception) return wrapper lti_kwargs['request'] = request lti_kwargs['error'] = error lti_kwargs['role'] = role if (not app) or isinstance(app, Flask): lti_kwargs['app'] = app return _lti else: lti_kwargs['app'] = None return _lti(app)
[ "def", "lti", "(", "app", "=", "None", ",", "request", "=", "'any'", ",", "error", "=", "default_error", ",", "role", "=", "'any'", ",", "*", "lti_args", ",", "*", "*", "lti_kwargs", ")", ":", "def", "_lti", "(", "function", ")", ":", "\"\"\"\n Inner LTI decorator\n\n :param: function:\n :return:\n \"\"\"", "@", "wraps", "(", "function", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"\n Pass LTI reference to function or return error.\n \"\"\"", "try", ":", "the_lti", "=", "LTI", "(", "lti_args", ",", "lti_kwargs", ")", "the_lti", ".", "verify", "(", ")", "the_lti", ".", "_check_role", "(", ")", "# pylint: disable=protected-access", "kwargs", "[", "'lti'", "]", "=", "the_lti", "return", "function", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "LTIException", "as", "lti_exception", ":", "error", "=", "lti_kwargs", ".", "get", "(", "'error'", ")", "exception", "=", "dict", "(", ")", "exception", "[", "'exception'", "]", "=", "lti_exception", "exception", "[", "'kwargs'", "]", "=", "kwargs", "exception", "[", "'args'", "]", "=", "args", "return", "error", "(", "exception", "=", "exception", ")", "return", "wrapper", "lti_kwargs", "[", "'request'", "]", "=", "request", "lti_kwargs", "[", "'error'", "]", "=", "error", "lti_kwargs", "[", "'role'", "]", "=", "role", "if", "(", "not", "app", ")", "or", "isinstance", "(", "app", ",", "Flask", ")", ":", "lti_kwargs", "[", "'app'", "]", "=", "app", "return", "_lti", "else", ":", "# We are wrapping without arguments", "lti_kwargs", "[", "'app'", "]", "=", "None", "return", "_lti", "(", "app", ")" ]
LTI decorator :param: app - Flask App object (optional). :py:attr:`flask.current_app` is used if no object is passed in. :param: error - Callback if LTI throws exception (optional). :py:attr:`pylti.flask.default_error` is the default. :param: request - Request type from :py:attr:`pylti.common.LTI_REQUEST_TYPE`. (default: any) :param: roles - LTI Role (default: any) :return: wrapper
[ "LTI", "decorator" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/flask.py#L159-L213
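Wiring the decorator into an application needs a Flask app, a session secret, a PYLTI_CONFIG consumers entry, and a view that accepts the injected lti keyword. A minimal sketch with placeholder route, key, and secret:

from flask import Flask
from pylti.flask import lti

app = Flask(__name__)
app.secret_key = "replace-me"            # needed for Flask's session
app.config["PYLTI_CONFIG"] = {
    "consumers": {"consumer_key_1": {"secret": "shared_secret_1"}}
}

@app.route("/launch", methods=["GET", "POST"])
@lti(request="initial", app=app)
def launch(lti):
    return "Hello, {}".format(lti.name)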
mitodl/pylti
pylti/flask.py
LTI._consumers
def _consumers(self): """ Gets consumer's map from app config :return: consumers map """ app_config = self.lti_kwargs['app'].config config = app_config.get('PYLTI_CONFIG', dict()) consumers = config.get('consumers', dict()) return consumers
python
def _consumers(self): app_config = self.lti_kwargs['app'].config config = app_config.get('PYLTI_CONFIG', dict()) consumers = config.get('consumers', dict()) return consumers
[ "def", "_consumers", "(", "self", ")", ":", "app_config", "=", "self", ".", "lti_kwargs", "[", "'app'", "]", ".", "config", "config", "=", "app_config", ".", "get", "(", "'PYLTI_CONFIG'", ",", "dict", "(", ")", ")", "consumers", "=", "config", ".", "get", "(", "'consumers'", ",", "dict", "(", ")", ")", "return", "consumers" ]
Gets consumer's map from app config :return: consumers map
[ "Gets", "consumer", "s", "map", "from", "app", "config" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/flask.py#L42-L51
mitodl/pylti
pylti/flask.py
LTI.verify_request
def verify_request(self): """ Verify LTI request :raises: LTIException if request validation failed """ if flask_request.method == 'POST': params = flask_request.form.to_dict() else: params = flask_request.args.to_dict() log.debug(params) log.debug('verify_request?') try: verify_request_common(self._consumers(), flask_request.url, flask_request.method, flask_request.headers, params) log.debug('verify_request success') # All good to go, store all of the LTI params into a # session dict for use in views for prop in LTI_PROPERTY_LIST: if params.get(prop, None): log.debug("params %s=%s", prop, params.get(prop, None)) session[prop] = params[prop] # Set logged in session key session[LTI_SESSION_KEY] = True return True except LTIException: log.debug('verify_request failed') for prop in LTI_PROPERTY_LIST: if session.get(prop, None): del session[prop] session[LTI_SESSION_KEY] = False raise
python
def verify_request(self): if flask_request.method == 'POST': params = flask_request.form.to_dict() else: params = flask_request.args.to_dict() log.debug(params) log.debug('verify_request?') try: verify_request_common(self._consumers(), flask_request.url, flask_request.method, flask_request.headers, params) log.debug('verify_request success') for prop in LTI_PROPERTY_LIST: if params.get(prop, None): log.debug("params %s=%s", prop, params.get(prop, None)) session[prop] = params[prop] session[LTI_SESSION_KEY] = True return True except LTIException: log.debug('verify_request failed') for prop in LTI_PROPERTY_LIST: if session.get(prop, None): del session[prop] session[LTI_SESSION_KEY] = False raise
[ "def", "verify_request", "(", "self", ")", ":", "if", "flask_request", ".", "method", "==", "'POST'", ":", "params", "=", "flask_request", ".", "form", ".", "to_dict", "(", ")", "else", ":", "params", "=", "flask_request", ".", "args", ".", "to_dict", "(", ")", "log", ".", "debug", "(", "params", ")", "log", ".", "debug", "(", "'verify_request?'", ")", "try", ":", "verify_request_common", "(", "self", ".", "_consumers", "(", ")", ",", "flask_request", ".", "url", ",", "flask_request", ".", "method", ",", "flask_request", ".", "headers", ",", "params", ")", "log", ".", "debug", "(", "'verify_request success'", ")", "# All good to go, store all of the LTI params into a", "# session dict for use in views", "for", "prop", "in", "LTI_PROPERTY_LIST", ":", "if", "params", ".", "get", "(", "prop", ",", "None", ")", ":", "log", ".", "debug", "(", "\"params %s=%s\"", ",", "prop", ",", "params", ".", "get", "(", "prop", ",", "None", ")", ")", "session", "[", "prop", "]", "=", "params", "[", "prop", "]", "# Set logged in session key", "session", "[", "LTI_SESSION_KEY", "]", "=", "True", "return", "True", "except", "LTIException", ":", "log", ".", "debug", "(", "'verify_request failed'", ")", "for", "prop", "in", "LTI_PROPERTY_LIST", ":", "if", "session", ".", "get", "(", "prop", ",", "None", ")", ":", "del", "session", "[", "prop", "]", "session", "[", "LTI_SESSION_KEY", "]", "=", "False", "raise" ]
Verify LTI request :raises: LTIException if request validation failed
[ "Verify", "LTI", "request", ":", "raises", ":", "LTIException", "is", "request", "validation", "failed" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/flask.py#L53-L87
mitodl/pylti
pylti/flask.py
LTI.response_url
def response_url(self): """ Returns remapped lis_outcome_service_url uses PYLTI_URL_FIX map to support edX dev-stack :return: remapped lis_outcome_service_url """ url = "" url = self.session['lis_outcome_service_url'] app_config = self.lti_kwargs['app'].config urls = app_config.get('PYLTI_URL_FIX', dict()) # url remapping is useful for using devstack # devstack reports httpS://localhost:8000/ and listens on HTTP for prefix, mapping in urls.items(): if url.startswith(prefix): for _from, _to in mapping.items(): url = url.replace(_from, _to) return url
python
def response_url(self): url = "" url = self.session['lis_outcome_service_url'] app_config = self.lti_kwargs['app'].config urls = app_config.get('PYLTI_URL_FIX', dict()) for prefix, mapping in urls.items(): if url.startswith(prefix): for _from, _to in mapping.items(): url = url.replace(_from, _to) return url
[ "def", "response_url", "(", "self", ")", ":", "url", "=", "\"\"", "url", "=", "self", ".", "session", "[", "'lis_outcome_service_url'", "]", "app_config", "=", "self", ".", "lti_kwargs", "[", "'app'", "]", ".", "config", "urls", "=", "app_config", ".", "get", "(", "'PYLTI_URL_FIX'", ",", "dict", "(", ")", ")", "# url remapping is useful for using devstack", "# devstack reports httpS://localhost:8000/ and listens on HTTP", "for", "prefix", ",", "mapping", "in", "urls", ".", "items", "(", ")", ":", "if", "url", ".", "startswith", "(", "prefix", ")", ":", "for", "_from", ",", "_to", "in", "mapping", ".", "items", "(", ")", ":", "url", "=", "url", ".", "replace", "(", "_from", ",", "_to", ")", "return", "url" ]
Returns remapped lis_outcome_service_url uses PYLTI_URL_FIX map to support edX dev-stack :return: remapped lis_outcome_service_url
[ "Returns", "remapped", "lis_outcome_service_url", "uses", "PYLTI_URL_FIX", "map", "to", "support", "edX", "dev", "-", "stack" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/flask.py#L90-L107
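The remapping table is keyed by URL prefix, and each entry holds plain substring replacements. The devstack case from the comment above looks roughly like this (hostname and path are illustrative):

PYLTI_URL_FIX = {
    # devstack advertises https://localhost:8000/ but actually listens on plain http
    "https://localhost:8000/": {"https://localhost:8000/": "http://localhost:8000/"},
}

url = "https://localhost:8000/courses/demo/grade_handler"
for prefix, mapping in PYLTI_URL_FIX.items():
    if url.startswith(prefix):
        for _from, _to in mapping.items():
            url = url.replace(_from, _to)
print(url)   # http://localhost:8000/courses/demo/grade_handler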
mitodl/pylti
pylti/flask.py
LTI._verify_any
def _verify_any(self): """ Verify that an initial request has been made, or failing that, that the request is in the session :raises: LTIException """ log.debug('verify_any enter') # Check to see if there is a new LTI launch request incoming newrequest = False if flask_request.method == 'POST': params = flask_request.form.to_dict() initiation = "basic-lti-launch-request" if params.get("lti_message_type", None) == initiation: newrequest = True # Scrub the session of the old authentication for prop in LTI_PROPERTY_LIST: if session.get(prop, None): del session[prop] session[LTI_SESSION_KEY] = False # Attempt the appropriate validation # Both of these methods raise LTIException as necessary if newrequest: self.verify_request() else: self._verify_session()
python
def _verify_any(self): log.debug('verify_any enter') newrequest = False if flask_request.method == 'POST': params = flask_request.form.to_dict() initiation = "basic-lti-launch-request" if params.get("lti_message_type", None) == initiation: newrequest = True for prop in LTI_PROPERTY_LIST: if session.get(prop, None): del session[prop] session[LTI_SESSION_KEY] = False if newrequest: self.verify_request() else: self._verify_session()
[ "def", "_verify_any", "(", "self", ")", ":", "log", ".", "debug", "(", "'verify_any enter'", ")", "# Check to see if there is a new LTI launch request incoming", "newrequest", "=", "False", "if", "flask_request", ".", "method", "==", "'POST'", ":", "params", "=", "flask_request", ".", "form", ".", "to_dict", "(", ")", "initiation", "=", "\"basic-lti-launch-request\"", "if", "params", ".", "get", "(", "\"lti_message_type\"", ",", "None", ")", "==", "initiation", ":", "newrequest", "=", "True", "# Scrub the session of the old authentication", "for", "prop", "in", "LTI_PROPERTY_LIST", ":", "if", "session", ".", "get", "(", "prop", ",", "None", ")", ":", "del", "session", "[", "prop", "]", "session", "[", "LTI_SESSION_KEY", "]", "=", "False", "# Attempt the appropriate validation", "# Both of these methods raise LTIException as necessary", "if", "newrequest", ":", "self", ".", "verify_request", "(", ")", "else", ":", "self", ".", "_verify_session", "(", ")" ]
Verify that an initial request has been made, or failing that, that the request is in the session :raises: LTIException
[ "Verify", "that", "an", "initial", "request", "has", "been", "made", "or", "failing", "that", "that", "the", "request", "is", "in", "the", "session", ":", "raises", ":", "LTIException" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/flask.py#L109-L135
mitodl/pylti
pylti/flask.py
LTI.close_session
def close_session(): """ Invalidates session """ for prop in LTI_PROPERTY_LIST: if session.get(prop, None): del session[prop] session[LTI_SESSION_KEY] = False
python
def close_session(): for prop in LTI_PROPERTY_LIST: if session.get(prop, None): del session[prop] session[LTI_SESSION_KEY] = False
[ "def", "close_session", "(", ")", ":", "for", "prop", "in", "LTI_PROPERTY_LIST", ":", "if", "session", ".", "get", "(", "prop", ",", "None", ")", ":", "del", "session", "[", "prop", "]", "session", "[", "LTI_SESSION_KEY", "]", "=", "False" ]
Invalidates session
[ "Invalidates", "session" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/flask.py#L149-L156
mitodl/pylti
pylti/chalice.py
LTI._consumers
def _consumers(self): """ Gets consumers from Lambda environment variables prefixed with CONSUMER_KEY_SECRET_. For example, given a consumer key of foo and a shared secret of bar, you should have an environment variable CONSUMER_KEY_SECRET_foo=bar. :return: consumers map :raises: LTIException if environment variables are not found """ consumers = {} for env in os.environ: if env.startswith('CONSUMER_KEY_SECRET_'): key = env[20:] # Strip off the CONSUMER_KEY_SECRET_ prefix # TODO: remove below after live test # consumers[key] = {"secret": os.environ[env], "cert": 'NA'} consumers[key] = {"secret": os.environ[env], "cert": None} if not consumers: raise LTIException("No consumers found. Chalice stores " "consumers in Lambda environment variables. " "Have you created the environment variables?") return consumers
python
def _consumers(self): consumers = {} for env in os.environ: if env.startswith('CONSUMER_KEY_SECRET_'): key = env[20:] consumers[key] = {"secret": os.environ[env], "cert": None} if not consumers: raise LTIException("No consumers found. Chalice stores " "consumers in Lambda environment variables. " "Have you created the environment variables?") return consumers
[ "def", "_consumers", "(", "self", ")", ":", "consumers", "=", "{", "}", "for", "env", "in", "os", ".", "environ", ":", "if", "env", ".", "startswith", "(", "'CONSUMER_KEY_SECRET_'", ")", ":", "key", "=", "env", "[", "20", ":", "]", "# Strip off the CONSUMER_KEY_SECRET_ prefix", "# TODO: remove below after live test", "# consumers[key] = {\"secret\": os.environ[env], \"cert\": 'NA'}", "consumers", "[", "key", "]", "=", "{", "\"secret\"", ":", "os", ".", "environ", "[", "env", "]", ",", "\"cert\"", ":", "None", "}", "if", "not", "consumers", ":", "raise", "LTIException", "(", "\"No consumers found. Chalice stores \"", "\"consumers in Lambda environment variables. \"", "\"Have you created the environment variables?\"", ")", "return", "consumers" ]
Gets consumers from Lambda environment variables prefixed with CONSUMER_KEY_SECRET_. For example, given a consumer key of foo and a shared secret of bar, you should have an environment variable CONSUMER_KEY_SECRET_foo=bar. :return: consumers map :raises: LTIException if environment variables are not found
[ "Gets", "consumers", "from", "Lambda", "environment", "variables", "prefixed", "with", "CONSUMER_KEY_SECRET_", ".", "For", "example", "given", "a", "consumer", "key", "of", "foo", "and", "a", "shared", "secret", "of", "bar", "you", "should", "have", "an", "environment", "variable", "CONSUMER_KEY_SECRET_foo", "=", "bar", "." ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/chalice.py#L50-L71
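Under Chalice the consumer table comes straight from the environment, so a deployment only needs variables of the form CONSUMER_KEY_SECRET_<key>=<secret>. Reproducing the same parsing outside Lambda, with the foo/bar example from the docstring:

import os

os.environ["CONSUMER_KEY_SECRET_foo"] = "bar"   # normally set in the Chalice/Lambda config

consumers = {
    name[len("CONSUMER_KEY_SECRET_"):]: {"secret": os.environ[name], "cert": None}
    for name in os.environ
    if name.startswith("CONSUMER_KEY_SECRET_")
}
print(consumers)   # {'foo': {'secret': 'bar', 'cert': None}}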
mitodl/pylti
pylti/chalice.py
LTI.verify_request
def verify_request(self): """ Verify LTI request :raises: LTIException if request validation failed """ request = self.lti_kwargs['app'].current_request if request.method == 'POST': # Chalice expects JSON and does not nativly support forms data in # a post body. The below is copied from the parsing of query # strings as implimented in match_route of Chalice local.py parsed_url = request.raw_body.decode() parsed_qs = parse_qs(parsed_url, keep_blank_values=True) params = {k: v[0] for k, v in parsed_qs .items()} else: params = request.query_params log.debug(params) log.debug('verify_request?') try: # Chalice does not have a url property therefore building it. protocol = request.headers.get('x-forwarded-proto', 'http') hostname = request.headers['host'] path = request.context['path'] url = urlunparse((protocol, hostname, path, "", "", "")) verify_request_common(self._consumers(), url, request.method, request.headers, params) log.debug('verify_request success') # All good to go, store all of the LTI params into a # session dict for use in views for prop in LTI_PROPERTY_LIST: if params.get(prop, None): log.debug("params %s=%s", prop, params.get(prop, None)) self.session[prop] = params[prop] # Set logged in session key self.session[LTI_SESSION_KEY] = True return True except LTIException: log.debug('verify_request failed') for prop in LTI_PROPERTY_LIST: if self.session.get(prop, None): del self.session[prop] self.session[LTI_SESSION_KEY] = False raise
python
def verify_request(self): request = self.lti_kwargs['app'].current_request if request.method == 'POST': parsed_url = request.raw_body.decode() parsed_qs = parse_qs(parsed_url, keep_blank_values=True) params = {k: v[0] for k, v in parsed_qs .items()} else: params = request.query_params log.debug(params) log.debug('verify_request?') try: protocol = request.headers.get('x-forwarded-proto', 'http') hostname = request.headers['host'] path = request.context['path'] url = urlunparse((protocol, hostname, path, "", "", "")) verify_request_common(self._consumers(), url, request.method, request.headers, params) log.debug('verify_request success') for prop in LTI_PROPERTY_LIST: if params.get(prop, None): log.debug("params %s=%s", prop, params.get(prop, None)) self.session[prop] = params[prop] self.session[LTI_SESSION_KEY] = True return True except LTIException: log.debug('verify_request failed') for prop in LTI_PROPERTY_LIST: if self.session.get(prop, None): del self.session[prop] self.session[LTI_SESSION_KEY] = False raise
[ "def", "verify_request", "(", "self", ")", ":", "request", "=", "self", ".", "lti_kwargs", "[", "'app'", "]", ".", "current_request", "if", "request", ".", "method", "==", "'POST'", ":", "# Chalice expects JSON and does not nativly support forms data in", "# a post body. The below is copied from the parsing of query", "# strings as implimented in match_route of Chalice local.py", "parsed_url", "=", "request", ".", "raw_body", ".", "decode", "(", ")", "parsed_qs", "=", "parse_qs", "(", "parsed_url", ",", "keep_blank_values", "=", "True", ")", "params", "=", "{", "k", ":", "v", "[", "0", "]", "for", "k", ",", "v", "in", "parsed_qs", ".", "items", "(", ")", "}", "else", ":", "params", "=", "request", ".", "query_params", "log", ".", "debug", "(", "params", ")", "log", ".", "debug", "(", "'verify_request?'", ")", "try", ":", "# Chalice does not have a url property therefore building it.", "protocol", "=", "request", ".", "headers", ".", "get", "(", "'x-forwarded-proto'", ",", "'http'", ")", "hostname", "=", "request", ".", "headers", "[", "'host'", "]", "path", "=", "request", ".", "context", "[", "'path'", "]", "url", "=", "urlunparse", "(", "(", "protocol", ",", "hostname", ",", "path", ",", "\"\"", ",", "\"\"", ",", "\"\"", ")", ")", "verify_request_common", "(", "self", ".", "_consumers", "(", ")", ",", "url", ",", "request", ".", "method", ",", "request", ".", "headers", ",", "params", ")", "log", ".", "debug", "(", "'verify_request success'", ")", "# All good to go, store all of the LTI params into a", "# session dict for use in views", "for", "prop", "in", "LTI_PROPERTY_LIST", ":", "if", "params", ".", "get", "(", "prop", ",", "None", ")", ":", "log", ".", "debug", "(", "\"params %s=%s\"", ",", "prop", ",", "params", ".", "get", "(", "prop", ",", "None", ")", ")", "self", ".", "session", "[", "prop", "]", "=", "params", "[", "prop", "]", "# Set logged in session key", "self", ".", "session", "[", "LTI_SESSION_KEY", "]", "=", "True", "return", "True", "except", "LTIException", ":", "log", ".", "debug", "(", "'verify_request failed'", ")", "for", "prop", "in", "LTI_PROPERTY_LIST", ":", "if", "self", ".", "session", ".", "get", "(", "prop", ",", "None", ")", ":", "del", "self", ".", "session", "[", "prop", "]", "self", ".", "session", "[", "LTI_SESSION_KEY", "]", "=", "False", "raise" ]
Verify LTI request :raises: LTIException if request validation failed
[ "Verify", "LTI", "request" ]
train
https://github.com/mitodl/pylti/blob/18a608282e0d5bc941beb2eaaeea3b7ad484b399/pylti/chalice.py#L73-L119
django-danceschool/django-danceschool
danceschool/core/views.py
EventRegistrationSelectView.get_queryset
def get_queryset(self): ''' Recent events are listed in link form. ''' return Event.objects.filter( Q(startTime__gte=timezone.now() - timedelta(days=90)) & ( Q(series__isnull=False) | Q(publicevent__isnull=False) ) ).annotate(count=Count('eventregistration')).annotate(**self.get_annotations()).exclude( Q(count=0) & Q(status__in=[ Event.RegStatus.hidden, Event.RegStatus.regHidden, Event.RegStatus.disabled ]) )
python
def get_queryset(self): return Event.objects.filter( Q(startTime__gte=timezone.now() - timedelta(days=90)) & ( Q(series__isnull=False) | Q(publicevent__isnull=False) ) ).annotate(count=Count('eventregistration')).annotate(**self.get_annotations()).exclude( Q(count=0) & Q(status__in=[ Event.RegStatus.hidden, Event.RegStatus.regHidden, Event.RegStatus.disabled ]) )
[ "def", "get_queryset", "(", "self", ")", ":", "return", "Event", ".", "objects", ".", "filter", "(", "Q", "(", "startTime__gte", "=", "timezone", ".", "now", "(", ")", "-", "timedelta", "(", "days", "=", "90", ")", ")", "&", "(", "Q", "(", "series__isnull", "=", "False", ")", "|", "Q", "(", "publicevent__isnull", "=", "False", ")", ")", ")", ".", "annotate", "(", "count", "=", "Count", "(", "'eventregistration'", ")", ")", ".", "annotate", "(", "*", "*", "self", ".", "get_annotations", "(", ")", ")", ".", "exclude", "(", "Q", "(", "count", "=", "0", ")", "&", "Q", "(", "status__in", "=", "[", "Event", ".", "RegStatus", ".", "hidden", ",", "Event", ".", "RegStatus", ".", "regHidden", ",", "Event", ".", "RegStatus", ".", "disabled", "]", ")", ")" ]
Recent events are listed in link form.
[ "Recent", "events", "are", "listed", "in", "link", "form", "." ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/core/views.py#L53-L64
django-danceschool/django-danceschool
danceschool/core/views.py
EventRegistrationSummaryView.get_context_data
def get_context_data(self, **kwargs): ''' Add the list of registrations for the given series ''' # Update the site session data so that registration processes know to send return links to # the view class registrations page. set_return_page() is in SiteHistoryMixin. self.set_return_page('viewregistrations',_('View Registrations'),event_id=self.object.id) context = { 'event': self.object, 'registrations': EventRegistration.objects.filter( event=self.object, cancelled=False ).order_by('registration__customer__user__first_name', 'registration__customer__user__last_name'), } context.update(kwargs) return super(EventRegistrationSummaryView, self).get_context_data(**context)
python
def get_context_data(self, **kwargs): self.set_return_page('viewregistrations',_('View Registrations'),event_id=self.object.id) context = { 'event': self.object, 'registrations': EventRegistration.objects.filter( event=self.object, cancelled=False ).order_by('registration__customer__user__first_name', 'registration__customer__user__last_name'), } context.update(kwargs) return super(EventRegistrationSummaryView, self).get_context_data(**context)
[ "def", "get_context_data", "(", "self", ",", "*", "*", "kwargs", ")", ":", "# Update the site session data so that registration processes know to send return links to", "# the view class registrations page. set_return_page() is in SiteHistoryMixin.", "self", ".", "set_return_page", "(", "'viewregistrations'", ",", "_", "(", "'View Registrations'", ")", ",", "event_id", "=", "self", ".", "object", ".", "id", ")", "context", "=", "{", "'event'", ":", "self", ".", "object", ",", "'registrations'", ":", "EventRegistration", ".", "objects", ".", "filter", "(", "event", "=", "self", ".", "object", ",", "cancelled", "=", "False", ")", ".", "order_by", "(", "'registration__customer__user__first_name'", ",", "'registration__customer__user__last_name'", ")", ",", "}", "context", ".", "update", "(", "kwargs", ")", "return", "super", "(", "EventRegistrationSummaryView", ",", "self", ")", ".", "get_context_data", "(", "*", "*", "context", ")" ]
Add the list of registrations for the given series
[ "Add", "the", "list", "of", "registrations", "for", "the", "given", "series" ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/core/views.py#L90-L105
django-danceschool/django-danceschool
danceschool/core/views.py
SubmissionRedirectView.get_context_data
def get_context_data(self, **kwargs): ''' The URL to redirect to can be explicitly specified, or it can come from the site session history, or it can be the default admin success page as specified in the site settings. ''' context = super(SubmissionRedirectView, self).get_context_data(**kwargs) redirect_url = unquote(self.request.GET.get('redirect_url', '')) if not redirect_url: redirect_url = self.get_return_page().get('url','') if not redirect_url: try: redirect_url = Page.objects.get( pk=getConstant('general__defaultAdminSuccessPage') ).get_absolute_url(settings.LANGUAGE_CODE) except ObjectDoesNotExist: redirect_url = '/' context.update({ 'redirect_url': redirect_url, 'seconds': self.request.GET.get('seconds', 5), }) return context
python
def get_context_data(self, **kwargs): context = super(SubmissionRedirectView, self).get_context_data(**kwargs) redirect_url = unquote(self.request.GET.get('redirect_url', '')) if not redirect_url: redirect_url = self.get_return_page().get('url','') if not redirect_url: try: redirect_url = Page.objects.get( pk=getConstant('general__defaultAdminSuccessPage') ).get_absolute_url(settings.LANGUAGE_CODE) except ObjectDoesNotExist: redirect_url = '/' context.update({ 'redirect_url': redirect_url, 'seconds': self.request.GET.get('seconds', 5), }) return context
[ "def", "get_context_data", "(", "self", ",", "*", "*", "kwargs", ")", ":", "context", "=", "super", "(", "SubmissionRedirectView", ",", "self", ")", ".", "get_context_data", "(", "*", "*", "kwargs", ")", "redirect_url", "=", "unquote", "(", "self", ".", "request", ".", "GET", ".", "get", "(", "'redirect_url'", ",", "''", ")", ")", "if", "not", "redirect_url", ":", "redirect_url", "=", "self", ".", "get_return_page", "(", ")", ".", "get", "(", "'url'", ",", "''", ")", "if", "not", "redirect_url", ":", "try", ":", "redirect_url", "=", "Page", ".", "objects", ".", "get", "(", "pk", "=", "getConstant", "(", "'general__defaultAdminSuccessPage'", ")", ")", ".", "get_absolute_url", "(", "settings", ".", "LANGUAGE_CODE", ")", "except", "ObjectDoesNotExist", ":", "redirect_url", "=", "'/'", "context", ".", "update", "(", "{", "'redirect_url'", ":", "redirect_url", ",", "'seconds'", ":", "self", ".", "request", ".", "GET", ".", "get", "(", "'seconds'", ",", "5", ")", ",", "}", ")", "return", "context" ]
The URL to redirect to can be explicitly specified, or it can come from the site session history, or it can be the default admin success page as specified in the site settings.
[ "The", "URL", "to", "redirect", "to", "can", "be", "explicitly", "specified", "or", "it", "can", "come", "from", "the", "site", "session", "history", "or", "it", "can", "be", "the", "default", "admin", "success", "page", "as", "specified", "in", "the", "site", "settings", "." ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/core/views.py#L115-L140
django-danceschool/django-danceschool
danceschool/core/views.py
ViewInvoiceView.get
def get(self, request, *args, **kwargs): ''' Invoices can be viewed only if the validation string is provided, unless the user is logged in and has view_all_invoice permissions ''' user_has_validation_string = self.get_object().validationString user_has_permissions = request.user.has_perm('core.view_all_invoices') if request.GET.get('v', None) == user_has_validation_string or user_has_permissions: return super(ViewInvoiceView, self).get(request, *args, **kwargs) return self.handle_no_permission()
python
def get(self, request, *args, **kwargs): user_has_validation_string = self.get_object().validationString user_has_permissions = request.user.has_perm('core.view_all_invoices') if request.GET.get('v', None) == user_has_validation_string or user_has_permissions: return super(ViewInvoiceView, self).get(request, *args, **kwargs) return self.handle_no_permission()
[ "def", "get", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "user_has_validation_string", "=", "self", ".", "get_object", "(", ")", ".", "validationString", "user_has_permissions", "=", "request", ".", "user", ".", "has_perm", "(", "'core.view_all_invoices'", ")", "if", "request", ".", "GET", ".", "get", "(", "'v'", ",", "None", ")", "==", "user_has_validation_string", "or", "user_has_permissions", ":", "return", "super", "(", "ViewInvoiceView", ",", "self", ")", ".", "get", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "self", ".", "handle_no_permission", "(", ")" ]
Invoices can be viewed only if the validation string is provided, unless the user is logged in and has view_all_invoice permissions
[ "Invoices", "can", "be", "viewed", "only", "if", "the", "validation", "string", "is", "provided", "unless", "the", "user", "is", "logged", "in", "and", "has", "view_all_invoice", "permissions" ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/core/views.py#L150-L159
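A shareable link therefore only needs the invoice's validationString appended as the v query parameter; holders of core.view_all_invoices can omit it. The path and values below are placeholders, not the project's actual URL pattern:

from urllib.parse import urlencode

# Placeholder path and values; the real route is whatever urls.py maps to ViewInvoiceView.
invoice_path = "/invoice/1234/"
share_link = invoice_path + "?" + urlencode({"v": "placeholder-validation-string"})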
django-danceschool/django-danceschool
danceschool/core/views.py
InvoiceNotificationView.dispatch
def dispatch(self, request, *args, **kwargs): ''' Get the set of invoices for which to permit notifications ''' if 'pk' in self.kwargs: try: self.invoices = Invoice.objects.filter(pk=self.kwargs.get('pk'))[:] except ValueError: raise Http404() if not self.invoices: raise Http404() else: ids = request.GET.get('invoices', '') try: self.invoices = Invoice.objects.filter(id__in=[x for x in ids.split(',')])[:] except ValueError: return HttpResponseBadRequest(_('Invalid invoice identifiers specified.')) if not self.invoices: return HttpResponseBadRequest(_('No invoice identifiers specified.')) toNotify = [] cannotNotify = [] for invoice in self.invoices: if invoice.get_default_recipients(): toNotify.append(invoice) else: cannotNotify.append(invoice) self.toNotify = toNotify self.cannotNotify = cannotNotify return super(InvoiceNotificationView, self).dispatch(request, *args, **kwargs)
python
def dispatch(self, request, *args, **kwargs): if 'pk' in self.kwargs: try: self.invoices = Invoice.objects.filter(pk=self.kwargs.get('pk'))[:] except ValueError: raise Http404() if not self.invoices: raise Http404() else: ids = request.GET.get('invoices', '') try: self.invoices = Invoice.objects.filter(id__in=[x for x in ids.split(',')])[:] except ValueError: return HttpResponseBadRequest(_('Invalid invoice identifiers specified.')) if not self.invoices: return HttpResponseBadRequest(_('No invoice identifiers specified.')) toNotify = [] cannotNotify = [] for invoice in self.invoices: if invoice.get_default_recipients(): toNotify.append(invoice) else: cannotNotify.append(invoice) self.toNotify = toNotify self.cannotNotify = cannotNotify return super(InvoiceNotificationView, self).dispatch(request, *args, **kwargs)
[ "def", "dispatch", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "'pk'", "in", "self", ".", "kwargs", ":", "try", ":", "self", ".", "invoices", "=", "Invoice", ".", "objects", ".", "filter", "(", "pk", "=", "self", ".", "kwargs", ".", "get", "(", "'pk'", ")", ")", "[", ":", "]", "except", "ValueError", ":", "raise", "Http404", "(", ")", "if", "not", "self", ".", "invoices", ":", "raise", "Http404", "(", ")", "else", ":", "ids", "=", "request", ".", "GET", ".", "get", "(", "'invoices'", ",", "''", ")", "try", ":", "self", ".", "invoices", "=", "Invoice", ".", "objects", ".", "filter", "(", "id__in", "=", "[", "x", "for", "x", "in", "ids", ".", "split", "(", "','", ")", "]", ")", "[", ":", "]", "except", "ValueError", ":", "return", "HttpResponseBadRequest", "(", "_", "(", "'Invalid invoice identifiers specified.'", ")", ")", "if", "not", "self", ".", "invoices", ":", "return", "HttpResponseBadRequest", "(", "_", "(", "'No invoice identifiers specified.'", ")", ")", "toNotify", "=", "[", "]", "cannotNotify", "=", "[", "]", "for", "invoice", "in", "self", ".", "invoices", ":", "if", "invoice", ".", "get_default_recipients", "(", ")", ":", "toNotify", ".", "append", "(", "invoice", ")", "else", ":", "cannotNotify", ".", "append", "(", "invoice", ")", "self", ".", "toNotify", "=", "toNotify", "self", ".", "cannotNotify", "=", "cannotNotify", "return", "super", "(", "InvoiceNotificationView", ",", "self", ")", ".", "dispatch", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Get the set of invoices for which to permit notifications
[ "Get", "the", "set", "of", "invoices", "for", "which", "to", "permit", "notifications" ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/core/views.py#L199-L230
django-danceschool/django-danceschool
danceschool/core/views.py
InvoiceNotificationView.get_form_kwargs
def get_form_kwargs(self): ''' Pass the set of invoices to the form for creation ''' kwargs = super(InvoiceNotificationView, self).get_form_kwargs() kwargs['invoices'] = self.toNotify return kwargs
python
def get_form_kwargs(self): kwargs = super(InvoiceNotificationView, self).get_form_kwargs() kwargs['invoices'] = self.toNotify return kwargs
[ "def", "get_form_kwargs", "(", "self", ")", ":", "kwargs", "=", "super", "(", "InvoiceNotificationView", ",", "self", ")", ".", "get_form_kwargs", "(", ")", "kwargs", "[", "'invoices'", "]", "=", "self", ".", "toNotify", "return", "kwargs" ]
Pass the set of invoices to the form for creation
[ "Pass", "the", "set", "of", "invoices", "to", "the", "form", "for", "creation" ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/core/views.py#L232-L236
django-danceschool/django-danceschool
danceschool/core/views.py
SendEmailView.dispatch
def dispatch(self, request, *args, **kwargs): ''' If a list of customers or groups was passed, then parse it ''' ids = request.GET.get('customers') groups = request.GET.get('customergroup') self.customers = None if ids or groups: # Initial filter applies to no one but allows appending by logical or filters = Q(id__isnull=True) if ids: filters = filters | Q(id__in=[int(x) for x in ids.split(',')]) if groups: filters = filters | Q(groups__id__in=[int(x) for x in groups.split(',')]) try: self.customers = Customer.objects.filter(filters) except ValueError: return HttpResponseBadRequest(_('Invalid customer ids passed')) return super(SendEmailView, self).dispatch(request, *args, **kwargs)
python
def dispatch(self, request, *args, **kwargs): ids = request.GET.get('customers') groups = request.GET.get('customergroup') self.customers = None if ids or groups: filters = Q(id__isnull=True) if ids: filters = filters | Q(id__in=[int(x) for x in ids.split(',')]) if groups: filters = filters | Q(groups__id__in=[int(x) for x in groups.split(',')]) try: self.customers = Customer.objects.filter(filters) except ValueError: return HttpResponseBadRequest(_('Invalid customer ids passed')) return super(SendEmailView, self).dispatch(request, *args, **kwargs)
[ "def", "dispatch", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "ids", "=", "request", ".", "GET", ".", "get", "(", "'customers'", ")", "groups", "=", "request", ".", "GET", ".", "get", "(", "'customergroup'", ")", "self", ".", "customers", "=", "None", "if", "ids", "or", "groups", ":", "# Initial filter applies to no one but allows appending by logical or", "filters", "=", "Q", "(", "id__isnull", "=", "True", ")", "if", "ids", ":", "filters", "=", "filters", "|", "Q", "(", "id__in", "=", "[", "int", "(", "x", ")", "for", "x", "in", "ids", ".", "split", "(", "','", ")", "]", ")", "if", "groups", ":", "filters", "=", "filters", "|", "Q", "(", "groups__id__in", "=", "[", "int", "(", "x", ")", "for", "x", "in", "groups", ".", "split", "(", "','", ")", "]", ")", "try", ":", "self", ".", "customers", "=", "Customer", ".", "objects", ".", "filter", "(", "filters", ")", "except", "ValueError", ":", "return", "HttpResponseBadRequest", "(", "_", "(", "'Invalid customer ids passed'", ")", ")", "return", "super", "(", "SendEmailView", ",", "self", ")", ".", "dispatch", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
If a list of customers or groups was passed, then parse it
[ "If", "a", "list", "of", "customers", "or", "groups", "was", "passed", "then", "parse", "it" ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/core/views.py#L589-L609
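The seed filter Q(id__isnull=True) matches no rows on its own, so each OR-ed clause can only widen the selection. A sketch of the same pattern with made-up query-string values:

from django.db.models import Q

ids, groups = "12,15", "3"                 # e.g. from ?customers=12,15&customergroup=3

filters = Q(id__isnull=True)               # matches nothing until something is OR-ed in
if ids:
    filters |= Q(id__in=[int(x) for x in ids.split(",")])
if groups:
    filters |= Q(groups__id__in=[int(x) for x in groups.split(",")])
# Customer.objects.filter(filters) then yields the union of the two selections.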
django-danceschool/django-danceschool
danceschool/core/views.py
SendEmailView.get_form_kwargs
def get_form_kwargs(self, **kwargs): ''' Get the list of recent months and recent series to pass to the form ''' numMonths = 12 lastStart = ( Event.objects.annotate(Min('eventoccurrence__startTime')) .order_by('-eventoccurrence__startTime__min') .values_list('eventoccurrence__startTime__min', flat=True) .first() ) if lastStart: month = lastStart.month year = lastStart.year else: month = timezone.now().month year = timezone.now().year months = [('', _('None'))] for i in range(0, numMonths): newmonth = (month - i - 1) % 12 + 1 newyear = year if month - i - 1 < 0: newyear = year - 1 newdate = datetime(year=newyear, month=newmonth, day=1) newdateStr = newdate.strftime("%m-%Y") monthStr = newdate.strftime("%B, %Y") months.append((newdateStr, monthStr)) cutoff = timezone.now() - timedelta(days=120) allEvents = Event.objects.filter(startTime__gte=cutoff).order_by('-startTime') recentSeries = [('', 'None')] + [(x.id, '%s %s: %s' % (month_name[x.month], x.year, x.name)) for x in allEvents] kwargs = super(SendEmailView, self).get_form_kwargs(**kwargs) kwargs.update({ "months": months, "recentseries": recentSeries, "customers": self.customers, }) return kwargs
python
def get_form_kwargs(self, **kwargs): numMonths = 12 lastStart = ( Event.objects.annotate(Min('eventoccurrence__startTime')) .order_by('-eventoccurrence__startTime__min') .values_list('eventoccurrence__startTime__min', flat=True) .first() ) if lastStart: month = lastStart.month year = lastStart.year else: month = timezone.now().month year = timezone.now().year months = [('', _('None'))] for i in range(0, numMonths): newmonth = (month - i - 1) % 12 + 1 newyear = year if month - i - 1 < 0: newyear = year - 1 newdate = datetime(year=newyear, month=newmonth, day=1) newdateStr = newdate.strftime("%m-%Y") monthStr = newdate.strftime("%B, %Y") months.append((newdateStr, monthStr)) cutoff = timezone.now() - timedelta(days=120) allEvents = Event.objects.filter(startTime__gte=cutoff).order_by('-startTime') recentSeries = [('', 'None')] + [(x.id, '%s %s: %s' % (month_name[x.month], x.year, x.name)) for x in allEvents] kwargs = super(SendEmailView, self).get_form_kwargs(**kwargs) kwargs.update({ "months": months, "recentseries": recentSeries, "customers": self.customers, }) return kwargs
[ "def", "get_form_kwargs", "(", "self", ",", "*", "*", "kwargs", ")", ":", "numMonths", "=", "12", "lastStart", "=", "(", "Event", ".", "objects", ".", "annotate", "(", "Min", "(", "'eventoccurrence__startTime'", ")", ")", ".", "order_by", "(", "'-eventoccurrence__startTime__min'", ")", ".", "values_list", "(", "'eventoccurrence__startTime__min'", ",", "flat", "=", "True", ")", ".", "first", "(", ")", ")", "if", "lastStart", ":", "month", "=", "lastStart", ".", "month", "year", "=", "lastStart", ".", "year", "else", ":", "month", "=", "timezone", ".", "now", "(", ")", ".", "month", "year", "=", "timezone", ".", "now", "(", ")", ".", "year", "months", "=", "[", "(", "''", ",", "_", "(", "'None'", ")", ")", "]", "for", "i", "in", "range", "(", "0", ",", "numMonths", ")", ":", "newmonth", "=", "(", "month", "-", "i", "-", "1", ")", "%", "12", "+", "1", "newyear", "=", "year", "if", "month", "-", "i", "-", "1", "<", "0", ":", "newyear", "=", "year", "-", "1", "newdate", "=", "datetime", "(", "year", "=", "newyear", ",", "month", "=", "newmonth", ",", "day", "=", "1", ")", "newdateStr", "=", "newdate", ".", "strftime", "(", "\"%m-%Y\"", ")", "monthStr", "=", "newdate", ".", "strftime", "(", "\"%B, %Y\"", ")", "months", ".", "append", "(", "(", "newdateStr", ",", "monthStr", ")", ")", "cutoff", "=", "timezone", ".", "now", "(", ")", "-", "timedelta", "(", "days", "=", "120", ")", "allEvents", "=", "Event", ".", "objects", ".", "filter", "(", "startTime__gte", "=", "cutoff", ")", ".", "order_by", "(", "'-startTime'", ")", "recentSeries", "=", "[", "(", "''", ",", "'None'", ")", "]", "+", "[", "(", "x", ".", "id", ",", "'%s %s: %s'", "%", "(", "month_name", "[", "x", ".", "month", "]", ",", "x", ".", "year", ",", "x", ".", "name", ")", ")", "for", "x", "in", "allEvents", "]", "kwargs", "=", "super", "(", "SendEmailView", ",", "self", ")", ".", "get_form_kwargs", "(", "*", "*", "kwargs", ")", "kwargs", ".", "update", "(", "{", "\"months\"", ":", "months", ",", "\"recentseries\"", ":", "recentSeries", ",", "\"customers\"", ":", "self", ".", "customers", ",", "}", ")", "return", "kwargs" ]
Get the list of recent months and recent series to pass to the form
[ "Get", "the", "list", "of", "recent", "months", "and", "recent", "series", "to", "pass", "to", "the", "form" ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/core/views.py#L611-L652
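The modular arithmetic walks backwards one calendar month per iteration while the comparison handles the year rollover. A quick check with February as the anchor month (values picked for illustration):

month, year = 2, 2019                      # anchor month
for i in range(4):
    newmonth = (month - i - 1) % 12 + 1
    newyear = year - 1 if month - i - 1 < 0 else year
    print(newmonth, newyear)               # 2 2019, 1 2019, 12 2018, 11 2018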
django-danceschool/django-danceschool
danceschool/core/views.py
SendEmailView.get_initial
def get_initial(self): ''' If the user already submitted the form and decided to return from the confirmation page, then re-populate the form ''' initial = super(SendEmailView, self).get_initial() form_data = self.request.session.get(EMAIL_VALIDATION_STR, {}).get('form_data', {}) if form_data: initial.update(form_data) return initial
python
def get_initial(self): initial = super(SendEmailView, self).get_initial() form_data = self.request.session.get(EMAIL_VALIDATION_STR, {}).get('form_data', {}) if form_data: initial.update(form_data) return initial
[ "def", "get_initial", "(", "self", ")", ":", "initial", "=", "super", "(", "SendEmailView", ",", "self", ")", ".", "get_initial", "(", ")", "form_data", "=", "self", ".", "request", ".", "session", ".", "get", "(", "EMAIL_VALIDATION_STR", ",", "{", "}", ")", ".", "get", "(", "'form_data'", ",", "{", "}", ")", "if", "form_data", ":", "initial", ".", "update", "(", "form_data", ")", "return", "initial" ]
If the user already submitted the form and decided to return from the confirmation page, then re-populate the form
[ "If", "the", "user", "already", "submitted", "the", "form", "and", "decided", "to", "return", "from", "the", "confirmation", "page", "then", "re", "-", "populate", "the", "form" ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/core/views.py#L654-L664
django-danceschool/django-danceschool
danceschool/core/views.py
SendEmailView.form_valid
def form_valid(self, form): ''' Pass form data to the confirmation view ''' form.cleaned_data.pop('template', None) self.request.session[EMAIL_VALIDATION_STR] = {'form_data': form.cleaned_data} return HttpResponseRedirect(reverse('emailConfirmation'))
python
def form_valid(self, form): form.cleaned_data.pop('template', None) self.request.session[EMAIL_VALIDATION_STR] = {'form_data': form.cleaned_data} return HttpResponseRedirect(reverse('emailConfirmation'))
[ "def", "form_valid", "(", "self", ",", "form", ")", ":", "form", ".", "cleaned_data", ".", "pop", "(", "'template'", ",", "None", ")", "self", ".", "request", ".", "session", "[", "EMAIL_VALIDATION_STR", "]", "=", "{", "'form_data'", ":", "form", ".", "cleaned_data", "}", "return", "HttpResponseRedirect", "(", "reverse", "(", "'emailConfirmation'", ")", ")" ]
Pass form data to the confirmation view
[ "Pass", "form", "data", "to", "the", "confirmation", "view" ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/core/views.py#L675-L679
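form_valid here and get_initial above form one store-then-confirm round trip through the session: the draft is stashed under EMAIL_VALIDATION_STR, the user is redirected to a confirmation view, and the form is re-populated if they come back. A hedged sketch of the same pattern on a generic FormView; the view class, session key, and URL name below are placeholders, not the project's:

from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views.generic import FormView

DRAFT_SESSION_KEY = 'email_draft'  # stand-in for EMAIL_VALIDATION_STR

class DraftThenConfirmView(FormView):
    # Illustrative only; mirrors the two methods shown above.

    def get_initial(self):
        # Re-populate the form when the user returns from the confirmation page.
        initial = super().get_initial()
        initial.update(self.request.session.get(DRAFT_SESSION_KEY, {}).get('form_data', {}))
        return initial

    def form_valid(self, form):
        # Anything the session serializer cannot handle must be removed first;
        # the view above pops its 'template' entry, presumably for that reason.
        self.request.session[DRAFT_SESSION_KEY] = {'form_data': form.cleaned_data}
        return HttpResponseRedirect(reverse('email_confirmation'))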
django-danceschool/django-danceschool
danceschool/core/views.py
RepeatEventsView.form_valid
def form_valid(self, form): ''' For each object in the queryset, create the duplicated objects ''' startDate = form.cleaned_data.get('startDate') repeatEvery = form.cleaned_data.get('repeatEvery') periodicity = form.cleaned_data.get('periodicity') quantity = form.cleaned_data.get('quantity') endDate = form.cleaned_data.get('endDate') # Create a list of start dates, based on the passed values of repeatEvery, # periodicity, quantity and endDate. This list will be iterated through to # create the new instances for each event. if periodicity == 'D': delta = {'days': repeatEvery} elif periodicity == 'W': delta = {'weeks': repeatEvery} elif periodicity == 'M': delta = {'months': repeatEvery} repeat_list = [] this_date = startDate if quantity: for k in range(0, quantity): repeat_list.append(this_date) this_date = this_date + relativedelta(**delta) elif endDate: while (this_date <= endDate): repeat_list.append(this_date) this_date = this_date + relativedelta(**delta) # Now, loop through the events in the queryset to create duplicates of them for event in self.queryset: # For each new occurrence, we determine the new startime by the distance from # midnight of the first occurrence date, where the first occurrence date is # replaced by the date given in repeat list old_min_time = event.localStartTime.replace(hour=0, minute=0, second=0, microsecond=0) old_occurrence_data = [ (x.startTime - old_min_time, x.endTime - old_min_time, x.cancelled) for x in event.eventoccurrence_set.all() ] old_role_data = [(x.role, x.capacity) for x in event.eventrole_set.all()] for instance_date in repeat_list: # Ensure that time zones are treated properly combined_datetime = datetime.combine(instance_date, datetime.min.time()) new_datetime = ensure_timezone(combined_datetime, old_min_time.tzinfo) # Removing the pk and ID allow new instances of the event to # be created upon saving with automatically generated ids. event.id = None event.pk = None event.save() # Create new occurrences for occurrence in old_occurrence_data: EventOccurrence.objects.create( event=event, startTime=new_datetime + occurrence[0], endTime=new_datetime + occurrence[1], cancelled=occurrence[2], ) # Create new event-specific role data for role in old_role_data: EventRole.objects.create( event=event, role=role[0], capacity=role[1], ) # Need to save twice to ensure that startTime etc. get # updated properly. event.save() return super(RepeatEventsView, self).form_valid(form)
python
def form_valid(self, form): startDate = form.cleaned_data.get('startDate') repeatEvery = form.cleaned_data.get('repeatEvery') periodicity = form.cleaned_data.get('periodicity') quantity = form.cleaned_data.get('quantity') endDate = form.cleaned_data.get('endDate') if periodicity == 'D': delta = {'days': repeatEvery} elif periodicity == 'W': delta = {'weeks': repeatEvery} elif periodicity == 'M': delta = {'months': repeatEvery} repeat_list = [] this_date = startDate if quantity: for k in range(0, quantity): repeat_list.append(this_date) this_date = this_date + relativedelta(**delta) elif endDate: while (this_date <= endDate): repeat_list.append(this_date) this_date = this_date + relativedelta(**delta) for event in self.queryset: old_min_time = event.localStartTime.replace(hour=0, minute=0, second=0, microsecond=0) old_occurrence_data = [ (x.startTime - old_min_time, x.endTime - old_min_time, x.cancelled) for x in event.eventoccurrence_set.all() ] old_role_data = [(x.role, x.capacity) for x in event.eventrole_set.all()] for instance_date in repeat_list: combined_datetime = datetime.combine(instance_date, datetime.min.time()) new_datetime = ensure_timezone(combined_datetime, old_min_time.tzinfo) event.id = None event.pk = None event.save() for occurrence in old_occurrence_data: EventOccurrence.objects.create( event=event, startTime=new_datetime + occurrence[0], endTime=new_datetime + occurrence[1], cancelled=occurrence[2], ) for role in old_role_data: EventRole.objects.create( event=event, role=role[0], capacity=role[1], ) event.save() return super(RepeatEventsView, self).form_valid(form)
[ "def", "form_valid", "(", "self", ",", "form", ")", ":", "startDate", "=", "form", ".", "cleaned_data", ".", "get", "(", "'startDate'", ")", "repeatEvery", "=", "form", ".", "cleaned_data", ".", "get", "(", "'repeatEvery'", ")", "periodicity", "=", "form", ".", "cleaned_data", ".", "get", "(", "'periodicity'", ")", "quantity", "=", "form", ".", "cleaned_data", ".", "get", "(", "'quantity'", ")", "endDate", "=", "form", ".", "cleaned_data", ".", "get", "(", "'endDate'", ")", "# Create a list of start dates, based on the passed values of repeatEvery,", "# periodicity, quantity and endDate. This list will be iterated through to", "# create the new instances for each event.", "if", "periodicity", "==", "'D'", ":", "delta", "=", "{", "'days'", ":", "repeatEvery", "}", "elif", "periodicity", "==", "'W'", ":", "delta", "=", "{", "'weeks'", ":", "repeatEvery", "}", "elif", "periodicity", "==", "'M'", ":", "delta", "=", "{", "'months'", ":", "repeatEvery", "}", "repeat_list", "=", "[", "]", "this_date", "=", "startDate", "if", "quantity", ":", "for", "k", "in", "range", "(", "0", ",", "quantity", ")", ":", "repeat_list", ".", "append", "(", "this_date", ")", "this_date", "=", "this_date", "+", "relativedelta", "(", "*", "*", "delta", ")", "elif", "endDate", ":", "while", "(", "this_date", "<=", "endDate", ")", ":", "repeat_list", ".", "append", "(", "this_date", ")", "this_date", "=", "this_date", "+", "relativedelta", "(", "*", "*", "delta", ")", "# Now, loop through the events in the queryset to create duplicates of them", "for", "event", "in", "self", ".", "queryset", ":", "# For each new occurrence, we determine the new startime by the distance from", "# midnight of the first occurrence date, where the first occurrence date is", "# replaced by the date given in repeat list", "old_min_time", "=", "event", ".", "localStartTime", ".", "replace", "(", "hour", "=", "0", ",", "minute", "=", "0", ",", "second", "=", "0", ",", "microsecond", "=", "0", ")", "old_occurrence_data", "=", "[", "(", "x", ".", "startTime", "-", "old_min_time", ",", "x", ".", "endTime", "-", "old_min_time", ",", "x", ".", "cancelled", ")", "for", "x", "in", "event", ".", "eventoccurrence_set", ".", "all", "(", ")", "]", "old_role_data", "=", "[", "(", "x", ".", "role", ",", "x", ".", "capacity", ")", "for", "x", "in", "event", ".", "eventrole_set", ".", "all", "(", ")", "]", "for", "instance_date", "in", "repeat_list", ":", "# Ensure that time zones are treated properly", "combined_datetime", "=", "datetime", ".", "combine", "(", "instance_date", ",", "datetime", ".", "min", ".", "time", "(", ")", ")", "new_datetime", "=", "ensure_timezone", "(", "combined_datetime", ",", "old_min_time", ".", "tzinfo", ")", "# Removing the pk and ID allow new instances of the event to", "# be created upon saving with automatically generated ids.", "event", ".", "id", "=", "None", "event", ".", "pk", "=", "None", "event", ".", "save", "(", ")", "# Create new occurrences", "for", "occurrence", "in", "old_occurrence_data", ":", "EventOccurrence", ".", "objects", ".", "create", "(", "event", "=", "event", ",", "startTime", "=", "new_datetime", "+", "occurrence", "[", "0", "]", ",", "endTime", "=", "new_datetime", "+", "occurrence", "[", "1", "]", ",", "cancelled", "=", "occurrence", "[", "2", "]", ",", ")", "# Create new event-specific role data", "for", "role", "in", "old_role_data", ":", "EventRole", ".", "objects", ".", "create", "(", "event", "=", "event", ",", "role", "=", "role", "[", "0", "]", ",", "capacity", "=", "role", "[", "1", "]", 
",", ")", "# Need to save twice to ensure that startTime etc. get", "# updated properly.", "event", ".", "save", "(", ")", "return", "super", "(", "RepeatEventsView", ",", "self", ")", ".", "form_valid", "(", "form", ")" ]
For each object in the queryset, create the duplicated objects
[ "For", "each", "object", "in", "the", "queryset", "create", "the", "duplicated", "objects" ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/core/views.py#L934-L1013
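Two idioms carry the duplication above: accumulating start dates with relativedelta, and inserting a copy of a saved instance by clearing its primary key before save(). Both are sketched below in isolation against a generic Django model; these helpers are illustrations, not part of the project:

from dateutil.relativedelta import relativedelta

def repeat_dates(start_date, repeat_every, quantity, unit='weeks'):
    # Same accumulation as the view, parameterised by unit instead of the
    # 'D'/'W'/'M' periodicity codes.
    dates, current = [], start_date
    for _ in range(quantity):
        dates.append(current)
        current = current + relativedelta(**{unit: repeat_every})
    return dates

def clone_saved_instance(obj):
    # Clearing pk (and the concrete id field) makes the next save() run an
    # INSERT with a fresh id -- which is what the loop above does per Event.
    # Related rows are NOT copied automatically; the view recreates the
    # EventOccurrence and EventRole rows by hand for that reason.
    obj.pk = None
    obj.id = None
    obj.save()
    return obj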
django-danceschool/django-danceschool
danceschool/guestlist/models.py
GuestList.recentEvents
def recentEvents(self): ''' Get the set of recent and upcoming events to which this list applies. ''' return Event.objects.filter( Q(pk__in=self.individualEvents.values_list('pk',flat=True)) | Q(session__in=self.eventSessions.all()) | Q(publicevent__category__in=self.eventCategories.all()) | Q(series__category__in=self.seriesCategories.all()) ).filter( Q(startTime__lte=timezone.now() + timedelta(days=60)) & Q(endTime__gte=timezone.now() - timedelta(days=60)) )
python
def recentEvents(self): return Event.objects.filter( Q(pk__in=self.individualEvents.values_list('pk',flat=True)) | Q(session__in=self.eventSessions.all()) | Q(publicevent__category__in=self.eventCategories.all()) | Q(series__category__in=self.seriesCategories.all()) ).filter( Q(startTime__lte=timezone.now() + timedelta(days=60)) & Q(endTime__gte=timezone.now() - timedelta(days=60)) )
[ "def", "recentEvents", "(", "self", ")", ":", "return", "Event", ".", "objects", ".", "filter", "(", "Q", "(", "pk__in", "=", "self", ".", "individualEvents", ".", "values_list", "(", "'pk'", ",", "flat", "=", "True", ")", ")", "|", "Q", "(", "session__in", "=", "self", ".", "eventSessions", ".", "all", "(", ")", ")", "|", "Q", "(", "publicevent__category__in", "=", "self", ".", "eventCategories", ".", "all", "(", ")", ")", "|", "Q", "(", "series__category__in", "=", "self", ".", "seriesCategories", ".", "all", "(", ")", ")", ")", ".", "filter", "(", "Q", "(", "startTime__lte", "=", "timezone", ".", "now", "(", ")", "+", "timedelta", "(", "days", "=", "60", ")", ")", "&", "Q", "(", "endTime__gte", "=", "timezone", ".", "now", "(", ")", "-", "timedelta", "(", "days", "=", "60", ")", ")", ")" ]
Get the set of recent and upcoming events to which this list applies.
[ "Get", "the", "set", "of", "recent", "and", "upcoming", "events", "to", "which", "this", "list", "applies", "." ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/guestlist/models.py#L33-L45
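The filter above is an OR of Q objects (individual events, sessions, and the two category relations) intersected with a ±60-day window around now. The window half can be lifted into a reusable helper; a sketch against an arbitrary queryset whose model has startTime and endTime fields (the helper name is illustrative):

from datetime import timedelta
from django.db.models import Q
from django.utils import timezone

def within_recent_window(qs, days=60):
    # Keep rows that start no later than `days` from now and end no earlier
    # than `days` ago, mirroring the second filter() call above.
    now = timezone.now()
    return qs.filter(
        Q(startTime__lte=now + timedelta(days=days)) &
        Q(endTime__gte=now - timedelta(days=days))
    )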
django-danceschool/django-danceschool
danceschool/guestlist/models.py
GuestList.currentEvent
def currentEvent(self): ''' Return the first event that hasn't ended yet, or if there are no future events, the last one to end. ''' currentEvent = self.recentEvents.filter(endTime__gte=timezone.now()).order_by('startTime').first() if not currentEvent: currentEvent = self.recentEvents.filter( endTime__lte=timezone.now() ).order_by('-endTime').first() return currentEvent
python
def currentEvent(self): currentEvent = self.recentEvents.filter(endTime__gte=timezone.now()).order_by('startTime').first() if not currentEvent: currentEvent = self.recentEvents.filter( endTime__lte=timezone.now() ).order_by('-endTime').first() return currentEvent
[ "def", "currentEvent", "(", "self", ")", ":", "currentEvent", "=", "self", ".", "recentEvents", ".", "filter", "(", "endTime__gte", "=", "timezone", ".", "now", "(", ")", ")", ".", "order_by", "(", "'startTime'", ")", ".", "first", "(", ")", "if", "not", "currentEvent", ":", "currentEvent", "=", "self", ".", "recentEvents", ".", "filter", "(", "endTime__lte", "=", "timezone", ".", "now", "(", ")", ")", ".", "order_by", "(", "'-endTime'", ")", ".", "first", "(", ")", "return", "currentEvent" ]
Return the first event that hasn't ended yet, or if there are no future events, the last one to end.
[ "Return", "the", "first", "event", "that", "hasn", "t", "ended", "yet", "or", "if", "there", "are", "no", "future", "events", "the", "last", "one", "to", "end", "." ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/guestlist/models.py#L48-L58
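The selection rule here (the next event that has not yet ended, otherwise the one that ended most recently) does not depend on Django at all; a plain-Python sketch over objects with startTime/endTime attributes, handy for checking the rule in isolation (the function name is illustrative):

def pick_current(events, now):
    # First choice: anything still running or upcoming, earliest start first.
    upcoming = sorted((e for e in events if e.endTime >= now),
                      key=lambda e: e.startTime)
    if upcoming:
        return upcoming[0]
    # Fallback: the most recently finished event, or None if there are none.
    finished = sorted((e for e in events if e.endTime < now),
                      key=lambda e: e.endTime)
    return finished[-1] if finished else None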
django-danceschool/django-danceschool
danceschool/guestlist/models.py
GuestList.appliesToEvent
def appliesToEvent(self, event): ''' Check whether this guest list is applicable to an event. ''' return ( event in self.individualEvents.all() or event.session in self.eventSessions.all() or event.category in self.seriesCategories.all() or event.category in self.eventCategories.all() )
python
def appliesToEvent(self, event): return ( event in self.individualEvents.all() or event.session in self.eventSessions.all() or event.category in self.seriesCategories.all() or event.category in self.eventCategories.all() )
[ "def", "appliesToEvent", "(", "self", ",", "event", ")", ":", "return", "(", "event", "in", "self", ".", "individualEvents", ".", "all", "(", ")", "or", "event", ".", "session", "in", "self", ".", "eventSessions", ".", "all", "(", ")", "or", "event", ".", "category", "in", "self", ".", "seriesCategories", ".", "all", "(", ")", "or", "event", ".", "category", "in", "self", ".", "eventCategories", ".", "all", "(", ")", ")" ]
Check whether this guest list is applicable to an event.
[ "Check", "whether", "this", "guest", "list", "is", "applicable", "to", "an", "event", "." ]
train
https://github.com/django-danceschool/django-danceschool/blob/bb08cbf39017a812a5a94bdb4ea34170bf1a30ba/danceschool/guestlist/models.py#L60-L67
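A short usage sketch for appliesToEvent; the GuestList module matches the file path above, but the Event import path and the surrounding retrieval logic are assumptions:

from django.utils import timezone
from danceschool.guestlist.models import GuestList  # module shown above
from danceschool.core.models import Event           # import path assumed

guest_list = GuestList.objects.first()
upcoming = Event.objects.filter(
    endTime__gte=timezone.now()
).order_by('startTime').first()

if guest_list and upcoming and guest_list.appliesToEvent(upcoming):
    # Safe to build the printable guest list for this event.
    print('%s applies to %s' % (guest_list, upcoming))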