code
stringlengths 52
7.75k
| docs
stringlengths 1
5.85k
|
---|---|
def getsystemhooks(self, page=1, per_page=20):
    """Fetch all system hooks.

    :param page: page number
    :param per_page: records per page
    :return: list of hooks on HTTP 200, otherwise False
    """
    query = {'page': page, 'per_page': per_page}
    response = requests.get(
        self.hook_url, params=query, headers=self.headers,
        verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.json() if response.status_code == 200 else False
def addsystemhook(self, url):
    """Register a new system hook.

    :param url: url of the hook
    :return: True on HTTP 201, otherwise False
    """
    response = requests.post(
        self.hook_url, headers=self.headers, data={"url": url},
        verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.status_code == 201
def deletesystemhook(self, hook_id):
    """Delete a system hook.

    :param hook_id: hook id
    :return: True on HTTP 200, otherwise False
    """
    response = requests.delete(
        '{0}/{1}'.format(self.hook_url, hook_id), data={"id": hook_id},
        headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.status_code == 200
def createbranch(self, project_id, branch, ref):
    """Create a branch from a commit SHA or an existing branch.

    :param project_id: the ID of a project
    :param branch: name of the new branch
    :param ref: commit SHA or existing branch to create the branch from
    :return: dict of the new branch on HTTP 201, otherwise False
    """
    payload = {"id": project_id, "branch_name": branch, "ref": ref}
    response = requests.post(
        '{0}/{1}/repository/branches'.format(self.projects_url, project_id),
        headers=self.headers, data=payload, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.json() if response.status_code == 201 else False
def protectbranch(self, project_id, branch):
    """Protect a branch from changes.

    :param project_id: project id
    :param branch: branch name
    :return: True on HTTP 200, otherwise False
    """
    response = requests.put(
        '{0}/{1}/repository/branches/{2}/protect'.format(self.projects_url, project_id, branch),
        headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.status_code == 200
def createforkrelation(self, project_id, from_project_id):
    """Create a fork relation.

    This does NOT create a fork; it only records a fork relation
    between two existing repositories.

    :param project_id: project id
    :param from_project_id: id of the project forked from
    :return: True on HTTP 201, otherwise False
    """
    payload = {'id': project_id, 'forked_from_id': from_project_id}
    response = requests.post(
        '{0}/{1}/fork/{2}'.format(self.projects_url, project_id, from_project_id),
        headers=self.headers, data=payload, verify=self.verify_ssl,
        auth=self.auth, timeout=self.timeout)
    return response.status_code == 201
def removeforkrelation(self, project_id):
    """Remove an existing fork relation.

    This does NOT remove the fork itself, only the recorded relation.

    :param project_id: project id
    :return: True on HTTP 200, otherwise False
    """
    response = requests.delete(
        '{0}/{1}/fork'.format(self.projects_url, project_id),
        headers=self.headers, verify=self.verify_ssl,
        auth=self.auth, timeout=self.timeout)
    return response.status_code == 200
def createfork(self, project_id):
    """Fork a project into the namespace of the authenticated user.

    NOTE(review): unlike the sibling methods this request sends no
    headers/auth and treats 200 (not 201) as success — confirm this is
    intentional for the targeted GitLab version.

    :param project_id: project ID to fork
    :return: True on HTTP 200, otherwise False
    """
    response = requests.post(
        '{0}/fork/{1}'.format(self.projects_url, project_id),
        timeout=self.timeout, verify=self.verify_ssl)
    return response.status_code == 200
def createissue(self, project_id, title, **kwargs):
    """Create a new issue.

    :param project_id: project id
    :param title: title of the issue
    :param kwargs: any additional issue field the GitLab API supports
    :return: dict of the created issue on HTTP 201, otherwise False
    """
    # Bug fix: the payload previously used {'id': id} — the *builtin* id
    # function — instead of the project id.
    data = {'id': project_id, 'title': title}
    if kwargs:
        data.update(kwargs)
    request = requests.post(
        '{0}/{1}/issues'.format(self.projects_url, project_id),
        headers=self.headers, data=data, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    if request.status_code == 201:
        return request.json()
    else:
        return False
def editissue(self, project_id, issue_id, **kwargs):
    """Edit an existing issue.

    :param project_id: project id
    :param issue_id: issue id
    :param kwargs: any issue field the GitLab API supports
    :return: dict of the updated issue on HTTP 200, otherwise False
    """
    payload = {'id': project_id, 'issue_id': issue_id}
    payload.update(kwargs)
    response = requests.put(
        '{0}/{1}/issues/{2}'.format(self.projects_url, project_id, issue_id),
        headers=self.headers, data=payload, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.json() if response.status_code == 200 else False
def enable_deploy_key(self, project, key_id):
    """Enable a deploy key for a project.

    >>> gitlab = Gitlab(host='http://localhost:10080', verify_ssl=False)
    >>> gitlab.login(user='root', password='5iveL!fe')
    >>> gitlab.enable_deploy_key(15, 5)

    :param project: ID or URL-encoded path of the project owned by the authenticated user
    :param key_id: ID of the deploy key
    :return: dictionary containing deploy key details
    :raise: HttpError: if an invalid response is returned
    """
    path = '/projects/{project}/deploy_keys/{key_id}/enable'.format(
        project=project, key_id=key_id)
    return self.post(path, default_response={})
def adddeploykey(self, project_id, title, key):
    """Create a new deploy key for a project.

    :param project_id: project id
    :param title: title of the key
    :param key: the key itself
    :return: dict of the new key on HTTP 201, otherwise False
    """
    payload = {'id': project_id, 'title': title, 'key': key}
    response = requests.post(
        '{0}/{1}/keys'.format(self.projects_url, project_id),
        headers=self.headers, data=payload, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.json() if response.status_code == 201 else False
def creategroup(self, name, path, **kwargs):
    """Create a new group.

    :param name: the name of the group
    :param path: the path for the group
    :param kwargs: any param the GitLab API supports
    :return: dict of the new group
    :raise: exceptions.HttpError with the server's message on failure
    """
    payload = {'name': name, 'path': path}
    payload.update(kwargs)
    response = requests.post(
        self.groups_url, data=payload, headers=self.headers,
        verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    if response.status_code != 201:
        raise exceptions.HttpError(response.json()['message'])
    return response.json()
def moveproject(self, group_id, project_id):
    """Move a project into a group.

    :param group_id: ID of the destination group
    :param project_id: ID of the project to be moved
    :return: dict of the updated project on HTTP 201, otherwise False
    """
    response = requests.post(
        '{0}/{1}/projects/{2}'.format(self.groups_url, group_id, project_id),
        headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.json() if response.status_code == 201 else False
def getmergerequests(self, project_id, page=1, per_page=20, state=None):
    """Get all merge requests for a project.

    :param project_id: ID of the project to retrieve merge requests for
    :param page: page number
    :param per_page: records per page
    :param state: optional merge-request state to filter by
    :return: list of merge requests on HTTP 200, otherwise False
    """
    query = {'page': page, 'per_page': per_page, 'state': state}
    response = requests.get(
        '{0}/{1}/merge_requests'.format(self.projects_url, project_id),
        params=query, headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.json() if response.status_code == 200 else False
def getmergerequest(self, project_id, mergerequest_id):
    """Get information about a specific merge request.

    :param project_id: ID of the project
    :param mergerequest_id: ID of the merge request
    :return: dict of the merge request on HTTP 200, otherwise False
    """
    response = requests.get(
        '{0}/{1}/merge_request/{2}'.format(self.projects_url, project_id, mergerequest_id),
        headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.json() if response.status_code == 200 else False
def createmergerequest(self, project_id, sourcebranch, targetbranch,
                       title, target_project_id=None, assignee_id=None):
    """Create a new merge request.

    :param project_id: ID of the project originating the merge request
    :param sourcebranch: name of the branch to merge from
    :param targetbranch: name of the branch to merge to
    :param title: title of the merge request
    :param target_project_id: optional ID of the target project
    :param assignee_id: optional assignee user ID
    :return: dict of the new merge request on HTTP 201, otherwise False
    """
    payload = {
        'source_branch': sourcebranch,
        'target_branch': targetbranch,
        'title': title,
        'assignee_id': assignee_id,
        'target_project_id': target_project_id,
    }
    response = requests.post(
        '{0}/{1}/merge_requests'.format(self.projects_url, project_id),
        data=payload, headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.json() if response.status_code == 201 else False
def acceptmergerequest(self, project_id, mergerequest_id, merge_commit_message=None):
    """Accept (merge) an existing merge request.

    :param project_id: ID of the project originating the merge request
    :param mergerequest_id: ID of the merge request to accept
    :param merge_commit_message: optional custom merge commit message
    :return: dict of the modified merge request on HTTP 200, otherwise False
    """
    payload = {'merge_commit_message': merge_commit_message}
    response = requests.put(
        '{0}/{1}/merge_request/{2}/merge'.format(self.projects_url, project_id, mergerequest_id),
        data=payload, headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.json() if response.status_code == 200 else False
def addcommenttomergerequest(self, project_id, mergerequest_id, note):
    """Add a comment to a merge request.

    :param project_id: ID of the project originating the merge request
    :param mergerequest_id: ID of the merge request to comment on
    :param note: text of the comment
    :return: True on HTTP 201, otherwise False
    """
    response = requests.post(
        '{0}/{1}/merge_request/{2}/comments'.format(self.projects_url, project_id, mergerequest_id),
        data={'note': note}, headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.status_code == 201
def createsnippet(self, project_id, title, file_name, code, visibility_level=0):
    """Create a snippet.

    :param project_id: project id to create the snippet under
    :param title: title of the snippet
    :param file_name: filename for the snippet
    :param code: content of the snippet
    :param visibility_level: private (0), internal (10) or public (20);
        other values are silently dropped from the payload
    :return: dict of the new snippet on HTTP 201, otherwise False
    """
    payload = {'id': project_id, 'title': title, 'file_name': file_name, 'code': code}
    if visibility_level in (0, 10, 20):
        payload['visibility_level'] = visibility_level
    response = requests.post(
        '{0}/{1}/snippets'.format(self.projects_url, project_id),
        data=payload, verify=self.verify_ssl, auth=self.auth, headers=self.headers, timeout=self.timeout)
    return response.json() if response.status_code == 201 else False
def deletesnippet(self, project_id, snippet_id):
    """Delete a snippet.

    :param project_id: project id
    :param snippet_id: snippet id
    :return: True on HTTP 200, otherwise False
    """
    response = requests.delete(
        '{0}/{1}/snippets/{2}'.format(self.projects_url, project_id, snippet_id),
        headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.status_code == 200
def unprotectrepositorybranch(self, project_id, branch):
    """Unprotect a single repository branch.

    Idempotent: unprotecting an already unprotected branch still
    returns HTTP 200.

    :param project_id: project id
    :param branch: branch to unprotect
    :return: dict with the branch on HTTP 200, otherwise None
    """
    response = requests.put(
        '{0}/{1}/repository/branches/{2}/unprotect'.format(self.projects_url, project_id, branch),
        headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    # On failure the original implementation returned None (bare return),
    # not False — preserved here.
    return response.json() if response.status_code == 200 else None
def createrepositorytag(self, project_id, tag_name, ref, message=None):
    """Create a new repository tag pointing at the supplied ref.

    :param project_id: project id
    :param tag_name: tag name
    :param ref: sha1 of the commit or branch to tag
    :param message: optional tag message
    :return: dict of the tag on HTTP 201, otherwise False
    """
    payload = {'id': project_id, 'tag_name': tag_name, 'ref': ref, 'message': message}
    response = requests.post(
        '{0}/{1}/repository/tags'.format(self.projects_url, project_id), data=payload,
        verify=self.verify_ssl, auth=self.auth, headers=self.headers, timeout=self.timeout)
    return response.json() if response.status_code == 201 else False
def delete_repository_tag(self, project_id, tag_name):
    """Delete a repository tag by name.

    :param project_id: the ID of a project
    :param tag_name: the name of the tag
    :return: dictionary describing the deleted tag
    :raise: HttpError: if an invalid response is returned
    """
    path = '/projects/{project_id}/repository/tags/{tag_name}'.format(
        project_id=project_id, tag_name=tag_name)
    return self.delete(path)
def addcommenttocommit(self, project_id, author, sha, path, line, note):
    """Add an inline comment to a specific commit.

    NOTE(review): this call sends no auth/timeout, unlike the sibling
    methods — confirm whether that is intentional.

    :param project_id: project id
    :param author: author info as returned by create mergerequest
    :param sha: commit SHA, branch or tag name (default branch if omitted)
    :param path: the file path
    :param line: the line number
    :param note: text of the comment
    :return: True on HTTP 201, otherwise False
    """
    payload = {
        'author': author,
        'note': note,
        'path': path,
        'line': line,
        'line_type': 'new',
    }
    response = requests.post(
        '{0}/{1}/repository/commits/{2}/comments'.format(self.projects_url, project_id, sha),
        headers=self.headers, data=payload, verify=self.verify_ssl)
    return response.status_code == 201
def getrepositorytree(self, project_id, **kwargs):
    """List repository files and directories in a project.

    :param project_id: the ID of a project
    :param kwargs: query options, e.g. ``path`` (subdirectory inside the
        repository) and ``ref_name`` (branch/tag; default branch if omitted)
    :return: dict with the tree on HTTP 200, otherwise False
    """
    query = dict(kwargs)
    response = requests.get(
        '{0}/{1}/repository/tree'.format(self.projects_url, project_id), params=query,
        verify=self.verify_ssl, auth=self.auth, headers=self.headers, timeout=self.timeout)
    return response.json() if response.status_code == 200 else False
def getrawfile(self, project_id, sha1, filepath):
    """Get the raw file contents for a file by commit SHA and path.

    :param project_id: the ID of a project
    :param sha1: the commit or branch name
    :param filepath: the path of the file
    :return: raw file contents (bytes) on HTTP 200, otherwise False
    """
    query = {'filepath': filepath}
    response = requests.get(
        '{0}/{1}/repository/blobs/{2}'.format(self.projects_url, project_id, sha1),
        params=query, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout,
        headers=self.headers)
    return response.content if response.status_code == 200 else False
def getrawblob(self, project_id, sha1):
    """Get the raw contents of a blob by blob SHA.

    :param project_id: the ID of a project
    :param sha1: the blob sha
    :return: raw blob contents (bytes) on HTTP 200, otherwise False
    """
    response = requests.get(
        '{0}/{1}/repository/raw_blobs/{2}'.format(self.projects_url, project_id, sha1),
        verify=self.verify_ssl, auth=self.auth, headers=self.headers, timeout=self.timeout)
    return response.content if response.status_code == 200 else False
def compare_branches_tags_commits(self, project_id, from_id, to_id):
    """Compare branches, tags or commits.

    :param project_id: the ID of a project
    :param from_id: commit sha or branch name to compare from
    :param to_id: commit sha or branch name to compare to
    :return: commit list and diff between the two refs on HTTP 200, otherwise False
    """
    query = {'from': from_id, 'to': to_id}
    response = requests.get(
        '{0}/{1}/repository/compare'.format(self.projects_url, project_id),
        params=query, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout,
        headers=self.headers)
    return response.json() if response.status_code == 200 else False
def searchproject(self, search, page=1, per_page=20):
    """Search for projects by name that are accessible to the authenticated user.

    :param search: query to search for
    :param page: page number
    :param per_page: records per page
    :return: list of results on HTTP 200, otherwise False
    """
    query = {'page': page, 'per_page': per_page}
    response = requests.get(
        "{0}/{1}".format(self.search_url, search), params=query,
        verify=self.verify_ssl, auth=self.auth, headers=self.headers, timeout=self.timeout)
    return response.json() if response.status_code == 200 else False
def getfilearchive(self, project_id, filepath=None):
    """Download an archive of the repository and save it to disk.

    :param project_id: project id
    :param filepath: path to save the file to; when falsy, the filename
        suggested by the server's Content-Disposition header is used
    :return: True if the file was saved to the filepath
    :raise: exceptions.HttpError with the server's message on failure
    """
    filepath = filepath or ''
    response = requests.get(
        '{0}/{1}/repository/archive'.format(self.projects_url, project_id),
        verify=self.verify_ssl, auth=self.auth, headers=self.headers, timeout=self.timeout)
    if response.status_code != 200:
        raise exceptions.HttpError(response.json()['message'])
    if filepath == "":
        # e.g. 'attachment; filename="project.tar.gz"' -> project.tar.gz
        filepath = response.headers['content-disposition'].split(';')[1].split('=')[1].strip('"')
    # TODO: catch OSError (permissions, ...) when writing
    # TODO: change the filepath to a path and keep always the filename?
    with open(filepath, 'wb') as archive:
        archive.write(response.content)
    return True
def deletegroup(self, group_id):
    """Delete a group by ID.

    :param group_id: id of the group to delete
    :return: True if deleted, False otherwise. False can happen for several
        reasons and there is no good way to differentiate them here.
    """
    response = requests.delete(
        '{0}/{1}'.format(self.groups_url, group_id),
        headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.status_code == 200
def getgroupmembers(self, group_id, page=1, per_page=20):
    """List the members of a group.

    :param group_id: the group id
    :param page: which page to return (default 1)
    :param per_page: items per page (default 20)
    :return: the group's members on HTTP 200, otherwise False
    """
    query = {'page': page, 'per_page': per_page}
    response = requests.get(
        '{0}/{1}/members'.format(self.groups_url, group_id), params=query,
        headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.json() if response.status_code == 200 else False
def addgroupmember(self, group_id, user_id, access_level):
    """Add a user to a group.

    :param group_id: group id
    :param user_id: user id
    :param access_level: access level as an int, or one of the names
        'owner', 'master', 'developer', 'reporter', 'guest'
        (see the GitLab help for the numeric meanings)
    :return: True on HTTP 201; False otherwise, including unknown level names
    """
    named_levels = {'owner': 50, 'master': 40, 'developer': 30,
                    'reporter': 20, 'guest': 10}
    if not isinstance(access_level, int):
        access_level = named_levels.get(access_level.lower())
        if access_level is None:
            return False
    payload = {'id': group_id, 'user_id': user_id, 'access_level': access_level}
    response = requests.post(
        '{0}/{1}/members'.format(self.groups_url, group_id),
        headers=self.headers, data=payload, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.status_code == 201
def deletegroupmember(self, group_id, user_id):
    """Remove a member from a group.

    :param group_id: group id to remove the member from
    :param user_id: user id
    :return: True on HTTP 200; implicitly None on any other status
        (preserved from the original implementation)
    """
    response = requests.delete(
        '{0}/{1}/members/{2}'.format(self.groups_url, group_id, user_id),
        headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    if response.status_code == 200:
        return True
def addldapgrouplink(self, group_id, cn, group_access, provider):
    """Add an LDAP group link.

    :param group_id: the ID of a group
    :param cn: the CN of an LDAP group
    :param group_access: minimum access level for members of the LDAP group
    :param provider: LDAP provider for the LDAP group (when using several providers)
    :return: True on HTTP 201, otherwise False
    """
    payload = {'id': group_id, 'cn': cn, 'group_access': group_access, 'provider': provider}
    response = requests.post(
        '{0}/{1}/ldap_group_links'.format(self.groups_url, group_id),
        headers=self.headers, data=payload, verify=self.verify_ssl)
    return response.status_code == 201
def deleteldapgrouplink(self, group_id, cn, provider=None):
    """Delete an LDAP group link (for a specific LDAP provider if given).

    :param group_id: the ID of a group
    :param cn: the CN of an LDAP group
    :param provider: optional name of an LDAP provider
    :return: True on HTTP 200, otherwise False
    """
    provider_part = '{0}/'.format(provider) if provider else ''
    url = '{base}/{gid}/ldap_group_links/{provider}{cn}'.format(
        base=self.groups_url, gid=group_id, cn=cn, provider=provider_part)
    response = requests.delete(url, headers=self.headers, verify=self.verify_ssl)
    return response.status_code == 200
def createissuewallnote(self, project_id, issue_id, content):
    """Create a new note on an issue.

    :param project_id: project ID
    :param issue_id: issue ID
    :param content: contents of the note
    :return: dict of the note on HTTP 201, otherwise False
    """
    payload = {'body': content}
    response = requests.post(
        '{0}/{1}/issues/{2}/notes'.format(self.projects_url, project_id, issue_id),
        verify=self.verify_ssl, auth=self.auth, headers=self.headers, data=payload, timeout=self.timeout)
    return response.json() if response.status_code == 201 else False
def createfile(self, project_id, file_path, branch_name, encoding, content, commit_message):
    """Create a new file in the repository.

    :param project_id: project id
    :param file_path: full path to the new file, e.g. lib/class.rb
    :param branch_name: the name of the branch
    :param encoding: encoding of the content
    :param content: file content
    :param commit_message: commit message
    :return: True on HTTP 201, otherwise False
    """
    payload = {
        'file_path': file_path,
        'branch_name': branch_name,
        'encoding': encoding,
        'content': content,
        'commit_message': commit_message,
    }
    response = requests.post(
        '{0}/{1}/repository/files'.format(self.projects_url, project_id),
        verify=self.verify_ssl, auth=self.auth, headers=self.headers, data=payload, timeout=self.timeout)
    return response.status_code == 201
def updatefile(self, project_id, file_path, branch_name, content, commit_message):
    """Update an existing file in the repository.

    :param project_id: project id
    :param file_path: full path to the file, e.g. lib/class.rb
    :param branch_name: the name of the branch
    :param content: file content
    :param commit_message: commit message
    :return: True on HTTP 200, otherwise False
    """
    payload = {
        'file_path': file_path,
        'branch_name': branch_name,
        'content': content,
        'commit_message': commit_message,
    }
    response = requests.put(
        '{0}/{1}/repository/files'.format(self.projects_url, project_id),
        headers=self.headers, data=payload, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.status_code == 200
def getfile(self, project_id, file_path, ref):
    """Receive information about a file in the repository: name, size, content.

    Note that the file content is Base64 encoded.

    :param project_id: project id
    :param file_path: full path to the file, e.g. lib/class.rb
    :param ref: the name of branch, tag or commit
    :return: dict with the file info on HTTP 200, otherwise False
    """
    query = {'file_path': file_path, 'ref': ref}
    # Bug fix: these fields were previously sent as a request *body*
    # (data=) on a GET, which servers ignore; they belong in the query
    # string (params=).
    request = requests.get(
        '{0}/{1}/repository/files'.format(self.projects_url, project_id),
        headers=self.headers, params=query, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    if request.status_code == 200:
        return request.json()
    else:
        return False
def deletefile(self, project_id, file_path, branch_name, commit_message):
    """Delete an existing file in the repository.

    :param project_id: project id
    :param file_path: full path to the file, e.g. lib/class.rb
    :param branch_name: the name of the branch
    :param commit_message: commit message
    :return: True on HTTP 200, otherwise False
    """
    payload = {
        'file_path': file_path,
        'branch_name': branch_name,
        'commit_message': commit_message,
    }
    response = requests.delete(
        '{0}/{1}/repository/files'.format(self.projects_url, project_id),
        headers=self.headers, data=payload, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.status_code == 200
def setgitlabciservice(self, project_id, token, project_url):
    """Set the GitLab CI service for a project.

    :param project_id: project id
    :param token: CI project token
    :param project_url: CI project url
    :return: True on HTTP 200, otherwise False
    """
    payload = {'token': token, 'project_url': project_url}
    response = requests.put(
        '{0}/{1}/services/gitlab-ci'.format(self.projects_url, project_id),
        verify=self.verify_ssl, auth=self.auth, headers=self.headers, data=payload, timeout=self.timeout)
    return response.status_code == 200
def deletegitlabciservice(self, project_id, token, project_url):
    """Delete the GitLab CI service settings for a project.

    :param project_id: project ID
    :param token: unused; kept for backward-compatible signature
    :param project_url: unused; kept for backward-compatible signature
    :return: True on HTTP 200, otherwise False
    """
    response = requests.delete(
        '{0}/{1}/services/gitlab-ci'.format(self.projects_url, project_id),
        headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
    return response.status_code == 200
def createlabel(self, project_id, name, color):
    """Create a new label for a repository with the given name and color.

    :param project_id: the ID of a project
    :param name: the name of the label
    :param color: 6-digit hex color with leading '#' (e.g. #FFAABB)
    :return: dict of the new label on HTTP 201, otherwise False
    """
    payload = {'name': name, 'color': color}
    response = requests.post(
        '{0}/{1}/labels'.format(self.projects_url, project_id), data=payload,
        verify=self.verify_ssl, auth=self.auth, headers=self.headers, timeout=self.timeout)
    return response.json() if response.status_code == 201 else False
def deletelabel(self, project_id, name):
    """Delete a label by name.

    :param project_id: the ID of a project
    :param name: the name of the label
    :return: True on HTTP 200, otherwise False
    """
    response = requests.delete(
        '{0}/{1}/labels'.format(self.projects_url, project_id), data={'name': name},
        verify=self.verify_ssl, auth=self.auth, headers=self.headers, timeout=self.timeout)
    return response.status_code == 200
def editlabel(self, project_id, name, new_name=None, color=None):
    """Update an existing label with a new name and/or color.

    At least one of new_name/color is required to actually change the label.

    :param project_id: the ID of a project
    :param name: the current name of the label
    :param new_name: optional new name
    :param color: optional new color
    :return: dict of the updated label on HTTP 200, otherwise False
    """
    payload = {'name': name, 'new_name': new_name, 'color': color}
    response = requests.put(
        '{0}/{1}/labels'.format(self.projects_url, project_id), data=payload,
        verify=self.verify_ssl, auth=self.auth, headers=self.headers, timeout=self.timeout)
    return response.json() if response.status_code == 200 else False
def getnamespaces(self, search=None, page=1, per_page=20):
    """Return a list of namespaces.

    :param search: optional search query
    :param page: which page to return (default 1)
    :param per_page: items per page (default 20)
    :return: list of namespaces on HTTP 200, otherwise False
    """
    query = {'page': page, 'per_page': per_page}
    if search:
        query['search'] = search
    response = requests.get(
        self.namespaces_url, params=query, headers=self.headers, verify=self.verify_ssl)
    return response.json() if response.status_code == 200 else False
def deprecated(func):
    """Mark *func* as deprecated; a DeprecationWarning is emitted on each call.

    :param func: the function to wrap
    :return: wrapper that warns and then delegates to func
    """
    import functools  # local import keeps this utility self-contained

    # functools.wraps copies __name__/__doc__/__dict__ like the previous
    # manual assignments did, and additionally sets __module__,
    # __qualname__ and __wrapped__ for better introspection.
    @functools.wraps(func)
    def deprecation_warning(*args, **kwargs):
        warnings.warn('Call to deprecated function {name}. Please consult our documentation at '
                      'http://pyapi-gitlab.readthedocs.io/en/latest/#gitlab.Gitlab.{name}'.format(name=func.__name__),
                      category=DeprecationWarning)
        return func(*args, **kwargs)
    return deprecation_warning
def get_class_that_defined_method(fun):
    """Best-effort lookup of the class that defines the given method.

    Will not work for nested (local) classes.

    Args:
        fun: Function / Method
    Returns:
        The class which defines the given method / function, or None.
    """
    if inspect.ismethod(fun):
        for klass in inspect.getmro(fun.__self__.__class__):
            if klass.__dict__.get(fun.__name__) is fun:
                return klass
        fun = fun.__func__  # fall back to __qualname__ parsing below
    if inspect.isfunction(fun):
        owner_name = fun.__qualname__.split('.<locals>', 1)[0].rsplit('.', 1)[0]
        candidate = getattr(inspect.getmodule(fun), owner_name, None)
        if isinstance(candidate, type):
            return candidate
    # e.g. builtin method descriptors expose their owner via __objclass__
    return getattr(fun, '__objclass__', None)
def get_field_mro(cls, field_name):
    """Walk the MRO of *cls* and collect every value of *field_name*.

    Each found value is normalised via make_list and merged into one set.
    Returns an empty set when cls has no __mro__ (e.g. not a class).
    """
    collected = set()
    if hasattr(cls, '__mro__'):
        for base in inspect.getmro(cls):
            value = getattr(base, field_name, None)
            if value is not None:
                collected |= set(make_list(value))
    return collected
def make_list(item_or_items):
    """Normalise the argument to a list.

    Examples:
        >>> make_list(1)
        [1]
        >>> make_list('str')
        ['str']
        >>> make_list(('i', 'am', 'a', 'tuple'))
        ['i', 'am', 'a', 'tuple']
        >>> print(make_list(None))
        None
        >>> # An instance of lists is unchanged
        >>> l = ['i', 'am', 'a', 'list']
        >>> make_list(l) is l
        True

    Args:
        item_or_items: A single value or an iterable.
    Returns:
        The given argument as a list; lists pass through unchanged and
        None stays None.
    """
    if item_or_items is None:
        return None
    if isinstance(item_or_items, list):
        return item_or_items
    is_non_str_iterable = (hasattr(item_or_items, '__iter__')
                           and not isinstance(item_or_items, str))
    return list(item_or_items) if is_non_str_iterable else [item_or_items]
def auto_str_ignore(ignore_list):
    """Suppress fields from the dynamically created __str__/__repr__ of auto_str.

    Example:
        >>> @auto_str()
        ... @auto_str_ignore(["l", "d"])
        ... class Demo(object):
        ...     def __init__(self, i=0, s="a", l=None, d=None):
        ...         self.i = i
        ...         self.s = s
        ...         self.l = l
        ...         self.d = d
        >>> dut = Demo(10, 'abc', [1, 2, 3], {'a': 1, 'b': 2})
        >>> print(str(dut))
        Demo(i=10, s='abc')

    Args:
        ignore_list: List or single item of field names to suppress.
    Returns:
        A class decorator that records the names on __auto_str_ignore__.
    """
    def _decorator(cls):
        cls.__auto_str_ignore__ = make_list(ignore_list)
        return cls
    return _decorator
def raise_on_major_version_mismatch(work_request, local_version):
    """Raise if major versions differ; otherwise just log the difference.

    :param work_request: WorkRequest: request that had a different version
    :param local_version: str: our version string that does not match message.version
    :raise ValueError: when the major versions do not match
    """
    if get_major_version(work_request.version) != get_major_version(local_version):
        raise ValueError("Received major version mismatch. request:{} local:{}".format(
            work_request.version, local_version
        ))
    logging.info("Ignoring non-major version mismatch request:{} local:{}".format(
        work_request.version, local_version))
def connect(self):
    """Create the internal connection to the AMQP service."""
    logging.info("Connecting to {} with user {}.".format(self.host, self.username))
    credentials = pika.PlainCredentials(self.username, self.password)
    params = pika.ConnectionParameters(
        host=self.host,
        credentials=credentials,
        heartbeat_interval=self.heartbeat_interval)
    self.connection = pika.BlockingConnection(params)
def close(self):
    """Close the internal AMQP connection, if one is open."""
    if not self.connection:
        return
    logging.info("Closing connection to {}.".format(self.host))
    self.connection.close()
    self.connection = None
def create_channel(self, queue_name):
    """Create a channel and declare a durable queue named *queue_name*.

    Must be connected before calling this method.

    :param queue_name: str: name of the queue to create
    :return: pika.channel.Channel: channel we can send/receive messages to/from
    """
    new_channel = self.connection.channel()
    new_channel.queue_declare(queue=queue_name, durable=True)
    return new_channel
def delete_queue(self, queue_name):
    """Connect, delete the queue with the given name, then disconnect.

    :param queue_name: str: name of the queue to delete
    """
    self.connect()
    self.connection.channel().queue_delete(queue=queue_name)
    self.close()
def send_durable_message(self, queue_name, body):
    """Connect, publish a persistent message with *body* to *queue_name*, disconnect.

    :param queue_name: str: name of the queue we want to put a message on
    :param body: content of the message we want to send
    """
    self.connect()
    channel = self.create_channel(queue_name)
    persistent = pika.BasicProperties(delivery_mode=2)  # make message persistent
    channel.basic_publish(exchange='',
                          routing_key=queue_name,
                          body=body,
                          properties=persistent)
    self.close()
def send_durable_exchange_message(self, exchange_name, body):
    """Send a persistent message with the specified body to a fanout exchange.

    :param exchange_name: str: name of the exchange to send the message into
    :param body: str: contents of the message
    :return Bool: True when delivery confirmed
    """
    self.connect()
    channel = self.connection.channel()
    # Fanout delivers the message to every subscriber of the exchange.
    # NOTE(review): `type=` was renamed `exchange_type=` in pika >= 0.11 —
    # confirm against the pinned pika version before upgrading.
    channel.exchange_declare(exchange=exchange_name, type='fanout')
    result = channel.basic_publish(exchange=exchange_name, routing_key='', body=body,
                                   properties=pika.BasicProperties(
                                       delivery_mode=2,  # make message persistent
                                   ))
    self.close()
    return result
def receive_loop_with_callback(self, queue_name, callback):
    """Process incoming messages with callback until close is called.

    :param queue_name: str: name of the queue to poll
    :param callback: func(ch, method, properties, body) called with data when data arrives
    """
    self.connect()
    channel = self.create_channel(queue_name)
    # prefetch_count=1 so a worker only takes one message at a time.
    channel.basic_qos(prefetch_count=1)
    channel.basic_consume(callback, queue=queue_name)
    channel.start_consuming()
def send(self, command, payload):
    """Send a WorkRequest containing command and payload to the queue specified in config.

    :param command: str: name of the command we want run by WorkQueueProcessor
    :param payload: object: picklable data to be used when running the command
    """
    request = WorkRequest(command, payload)
    logging.info("Sending {} message to queue {}.".format(request.command, self.queue_name))
    # Pickle protocol 2 keeps the payload readable by python2 consumers.
    self.connection.send_durable_message(self.queue_name, pickle.dumps(request, protocol=2))
    # The original passed self.queue_name as an unused second format() arg.
    logging.info("Sent {} message.".format(request.command))
def add_command_by_method_name(self, command, obj):
    """Look up the method named command on obj and register it for that command.

    :param command: str: name of the command to wait for
    :param obj: object: must have a member function with the exact name of the command
    :raises ValueError: if obj doesn't have a callable attribute named command
    """
    # Use a default so a missing attribute raises the documented ValueError
    # instead of leaking an AttributeError to the caller.
    func = getattr(obj, command, None)
    if func and callable(func):
        self.add_command(command, func)
    else:
        raise ValueError("Object missing {} method.".format(command))
def shutdown(self, payload=None):
    """Close the connection and stop the messaging loop.

    :param payload: None: not used. Here to allow using this method with add_command.
    """
    logging.info("Work queue shutdown.")
    self.connection.close()
    self.receiving_messages = False
def process_messages_loop(self):
    """Process incoming WorkRequest messages one at a time via functions specified by add_command.

    Logs and re-raises if the AMQP connection drops.
    """
    self.receiving_messages = True
    try:
        self.process_messages_loop_internal()
    except pika.exceptions.ConnectionClosed as ex:
        logging.error("Connection closed {}.".format(ex))
        raise
def process_messages_loop_internal(self):
    """Busy loop that processes incoming WorkRequest messages via functions specified by add_command.

    Terminates if a command runs the shutdown method.
    """
    logging.info("Starting work queue loop.")
    self.connection.receive_loop_with_callback(self.queue_name, self.process_message)
def process_message(self, ch, method, properties, body):
    """Callback fired for every message that arrives while in process_messages_loop.

    NOTE(review): the body is unpickled — this assumes messages only come from
    trusted producers; pickle on untrusted data allows code execution.
    :param ch: channel the message was sent on
    :param method: pika.Basic.Deliver
    :param properties: pika.BasicProperties
    :param body: str: payload of the message (pickled WorkRequest)
    """
    self.work_request = pickle.loads(body)
    # Ack before processing: a crash during processing will NOT requeue the message.
    ch.basic_ack(delivery_tag=method.delivery_tag)
    self.process_work_request()
def process_messages_loop_internal(self):
    """Busy loop that processes incoming WorkRequest messages via functions specified by add_command.

    Disconnects while servicing a message and reconnects once finished, so the
    AMQP connection is not held open during long-running work.
    Terminates if a command runs the shutdown method.
    """
    while self.receiving_messages:
        # Connect to the AMQP server, listen for one message, then disconnect.
        self.work_request = None
        self.connection.receive_loop_with_callback(self.queue_name, self.save_work_request_and_close)
        if self.work_request:
            self.process_work_request()
def save_work_request_and_close(self, ch, method, properties, body):
    """Save the unpickled message body, ack it, stop consuming, and close the connection.

    :param ch: channel the message was sent on
    :param method: pika.Basic.Deliver
    :param properties: pika.BasicProperties
    :param body: str: payload of the message (pickled WorkRequest)
    """
    self.work_request = pickle.loads(body)
    ch.basic_ack(delivery_tag=method.delivery_tag)
    ch.stop_consuming()
    self.connection.close()
def send(self, payload):
    """Send payload as a durable message to the exchange specified in config.

    :param payload: str: string data that will be put into the exchange's message body
    :return Bool: True when delivery confirmed
    """
    result = self.connection.send_durable_exchange_message(self.exchange_name, payload)
    # The original format string had no placeholder for the exchange name.
    logging.info("Sent message to exchange {}.".format(self.exchange_name))
    return result
def get_related_galleries(gallery, count=5):
    """Get latest related galleries from the same section as the originating gallery.

    Count defaults to five but can be overridden.
    Usage: {% get_related_galleries gallery <10> %}
    """
    # Just use the first section the gallery is assigned to. If they assigned
    # to more than one, tough.
    try:
        cat = gallery.sections.all()[0]
        related = cat.gallery_categories.filter(published=True).exclude(id=gallery.id).order_by('-id')[:count]
    except Exception:
        # Was a bare except; narrowed so SystemExit/KeyboardInterrupt are not
        # swallowed. Still best-effort: any lookup failure yields no related items.
        related = None
    return {'related': related, 'MEDIA_URL': settings.MEDIA_URL}
def make_lando_router(config, obj, queue_name):
    """Make a MessageRouter listening on queue_name, routing messages to the VM version of lando.

    :param config: WorkerConfig/ServerConfig: settings for connecting to the queue
    :param obj: object: implements lando specific methods
    :param queue_name: str: name of the queue we will listen on
    :return: MessageRouter
    """
    return MessageRouter(config, obj, queue_name, VM_LANDO_INCOMING_MESSAGES,
                         processor_constructor=WorkQueueProcessor)
def make_k8s_lando_router(config, obj, queue_name):
    """Make a MessageRouter listening on queue_name, routing messages to the k8s version of lando.

    :param config: WorkerConfig/ServerConfig: settings for connecting to the queue
    :param obj: object: implements lando specific methods
    :param queue_name: str: name of the queue we will listen on
    :return: MessageRouter
    """
    return MessageRouter(config, obj, queue_name, K8S_LANDO_INCOMING_MESSAGES,
                         processor_constructor=WorkQueueProcessor)
def make_worker_router(config, obj, queue_name):
    """Make a MessageRouter listening on queue_name, routing lando_worker specific messages to obj.

    Uses DisconnectingWorkQueueProcessor so the connection is dropped while a
    message is being serviced.
    :param config: WorkerConfig/ServerConfig: settings for connecting to the queue
    :param obj: object: implements lando_worker specific methods
    :param queue_name: str: name of the queue we will listen on
    :return: MessageRouter
    """
    return MessageRouter(config, obj, queue_name, VM_LANDO_WORKER_INCOMING_MESSAGES,
                         processor_constructor=DisconnectingWorkQueueProcessor)
def load_healthchecks(self):
    """Load default healthchecks, optionally autodiscover more, and mark the registry loaded."""
    self.load_default_healthchecks()
    # Autodiscovery defaults to on when the setting is absent.
    if getattr(settings, 'AUTODISCOVER_HEALTHCHECKS', True):
        self.autodiscover_healthchecks()
    self._registry_loaded = True
def load_default_healthchecks(self):
    """Load healthchecks named in settings.HEALTHCHECKS as dotted import paths.

    Defaults are listed in `DEFAULT_HEALTHCHECKS`.
    """
    default_healthchecks = getattr(settings, 'HEALTHCHECKS', DEFAULT_HEALTHCHECKS)
    for path in default_healthchecks:
        self.register_healthcheck(import_string(path))
def run_healthchecks(self):
    """Run all registered healthchecks and return a list of HealthcheckResponse.

    Lazily loads the registry on first call. A registered check may be a class
    (instantiated first) or a plain callable; a bool result is wrapped in a
    HealthcheckResponse, and any exception becomes a failed response instead of
    propagating, so one broken check cannot mask the others.
    """
    if not self._registry_loaded:
        self.load_healthchecks()

    def get_healthcheck_name(hc):
        # Prefer an explicit `name` attribute; fall back to class/function __name__.
        if hasattr(hc, 'name'):
            return hc.name
        return hc.__name__

    responses = []
    for healthcheck in self._registry:
        try:
            if inspect.isclass(healthcheck):
                healthcheck = healthcheck()
            response = healthcheck()
            if isinstance(response, bool):
                response = HealthcheckResponse(
                    name=get_healthcheck_name(healthcheck),
                    status=response,
                )
        except Exception as e:
            # Covers constructor errors too; healthcheck may still be the class here.
            response = HealthcheckResponse(
                name=get_healthcheck_name(healthcheck),
                status=False,
                exception=str(e),
                exception_class=e.__class__.__name__,
            )
        responses.append(response)
    return responses
def rr_cache(max_size=128):
    """Random Replacement cache decorator, implementing :class:`faste.caches.RRCache`.

    :keyword int max_size: max cache size
    """
    def actual_decorator(func):
        return _cached_func(func, caches.RRCache, max_size)
    return actual_decorator
def lru_cache(max_size=128):
    """Least Recently Used cache decorator, implementing :class:`faste.caches.LRUCache`.

    :keyword int max_size: max cache size
    """
    def actual_decorator(func):
        return _cached_func(func, caches.LRUCache, max_size)
    return actual_decorator
def lfu_cache(max_size=128):
    """Least Frequently Used cache decorator, implementing :class:`faste.caches.LFUCache`.

    :keyword int max_size: max cache size
    """
    def actual_decorator(func):
        return _cached_func(func, caches.LFUCache, max_size)
    return actual_decorator
def timed_cache(timeout, max_size=128):
    """Time based cache decorator, implementing :class:`faste.caches.TimeoutCache`.

    :param int timeout: cache key timeout in seconds
    :keyword int max_size: max cache size
    """
    def actual_decorator(func):
        return _cached_func(func, caches.TimeoutCache, timeout, max_size=max_size)
    return actual_decorator
def advance_by(self, amount):
    """Advance the time reference by the given amount.

    :param `float` amount: number of seconds to advance.
    :raise `ValueError`: if *amount* is negative.
    """
    if amount < 0:
        raise ValueError("cannot retreat time reference: amount {} < 0"
                         .format(amount))
    self.__delta += amount
def advance_to(self, timestamp):
    """Advance the time reference so that now is the given timestamp.

    :param `float` timestamp: the new current timestamp.
    :raise `ValueError`: if *timestamp* is in the past.
    """
    now = self.__original_time()
    if timestamp < now:
        raise ValueError("cannot retreat time reference: "
                         "target {} < now {}"
                         .format(timestamp, now))
    self.__delta = timestamp - now
def job_step_complete(self, job_request_payload):
    """Send a message that a (non-store-output) job step completed.

    :param job_request_payload: StageJobPayload|RunJobPayload: payload from the completed job
    :raises ValueError: if used for a StoreJobOutput message type
    """
    if job_request_payload.success_command == JobCommands.STORE_JOB_OUTPUT_COMPLETE:
        # Original message contained a doubled word ("use use").
        raise ValueError("Programmer error: use job_step_store_output_complete instead.")
    payload = JobStepCompletePayload(job_request_payload)
    self.send(job_request_payload.success_command, payload)
def job_step_store_output_complete(self, job_request_payload, output_project_info):
    """Send a message that the store-output job step completed.

    :param job_request_payload: StoreJobOutputPayload: payload from the completed job
    :param output_project_info: object: info about the project created
    :raises ValueError: if used for a non-StoreJobOutput message type
    """
    if job_request_payload.success_command != JobCommands.STORE_JOB_OUTPUT_COMPLETE:
        raise ValueError("Programmer error only use job_step_store_output_complete for store_output_complete.")
    payload = JobStepStoreOutputCompletePayload(job_request_payload, output_project_info)
    self.send(job_request_payload.success_command, payload)
def job_step_error(self, job_request_payload, message):
    """Send a message that the job step failed.

    :param job_request_payload: StageJobPayload|RunJobPayload|StoreJobOutputPayload: payload from the failed job
    :param message: description of the error
    """
    payload = JobStepErrorPayload(job_request_payload, message)
    self.send(job_request_payload.error_command, payload)
def stage_job(self, credentials, job_details, input_files, vm_instance_name):
    """Request that a job be staged on a worker (i.e. download some files).

    :param credentials: jobapi.Credentials: user's credentials used to download input_files
    :param job_details: object: details about job (id, name, created date, workflow version)
    :param input_files: [InputFile]: list of files to download
    :param vm_instance_name: str: name of the instance lando_worker is running on (passed back in the response)
    """
    payload = StageJobPayload(credentials, job_details, input_files, vm_instance_name)
    self._send(JobCommands.STAGE_JOB, payload)
def run_job(self, job_details, workflow, vm_instance_name):
    """Request that a worker execute a workflow.

    :param job_details: object: details about job (id, name, created date, workflow version)
    :param workflow: jobapi.Workflow: url to workflow and parameters to use
    :param vm_instance_name: str: name of the instance lando_worker is running on (passed back in the response)
    """
    payload = RunJobPayload(job_details, workflow, vm_instance_name)
    self._send(JobCommands.RUN_JOB, payload)
def store_job_output(self, credentials, job_details, vm_instance_name):
    """Request that the output of a finished job be stored.

    :param credentials: jobapi.Credentials: user's credentials used to upload resulting files
    :param job_details: object: details about job (id, name, created date, workflow version)
    :param vm_instance_name: str: name of the instance lando_worker is running on (passed back in the response)
    """
    payload = StoreJobOutputPayload(credentials, job_details, vm_instance_name)
    self._send(JobCommands.STORE_JOB_OUTPUT, payload)
def reset_frequencies(self, frequency=0):
    """Reset the stored frequency of every cache entry.

    :keyword int frequency: frequency to reset to; negative values are clamped to 0
    :return: int: the (clamped) frequency actually applied
    """
    frequency = max(frequency, 0)
    # Iterate items to avoid a second lookup per key; reassigning existing
    # keys during iteration is safe because the key set does not change.
    for key, (value, _) in self._store.items():
        self._store[key] = (value, frequency)
    return frequency
def oldest(self):
    """Get the (key, value) pair whose stored counter (second tuple slot) is lowest.

    :returns: tuple (key, value), or None when the cache is empty
    """
    if not self._store:
        return None
    key, (value, _) = min(self._store.items(), key=lambda item: item[1][1])
    return key, value
def time_left(self, key):
    """Amount of time (in seconds) an item has left before it is evicted.

    :param key: key to check time for
    :returns: float: seconds remaining
    :raises KeyError: if key does not exist in the cache (after evicting expired items)
    """
    self._evict_old()
    if key not in self._store:
        raise KeyError("key {0!r} does not exist in cache".format(key))
    return self.timeout - (time.time() - self._store[key][1])
def more_than_one_index(s, brackets=2):
    """Search s for the given number of "[...]" groups (default: two sets of [] []).

    Reconstructed from a corrupted dump where "return False" and a duplicate
    copy of the def were fused together.
    :param s: string to search
    :param brackets: number of complete "[...]" pairs required for a True result
    :return: True if s contains at least `brackets` bracket pairs, otherwise False
    """
    start = 0
    brackets_num = 0
    while start != -1 and brackets_num < brackets:
        start = s.find("[", start)
        if start == -1:
            break
        start = s.find("]", start)
        brackets_num += 1
    if start != -1:
        return True
    return False
def get_key(s):
    """Get the text between [ and ], stripping surrounding single quotes if present.

    Reconstructed from a corrupted dump where the return statement and a
    duplicate copy of the def were fused together.
    NOTE(review): an input like "]x[" (closing bracket before the opening one)
    can raise IndexError on the quote checks — confirm callers never pass that.
    :param s: string to process
    :return: the key text, or None when s has no "[" or no "]"
    """
    start = s.find("[")
    end = s.find("]")
    if start == -1 or end == -1:
        return None
    if s[start + 1] == "'":
        start += 1
    if s[end - 1] == "'":
        end -= 1
    return s[start + 1:end]
def includeme(config):
    """Pyramid inclusion hook: wire up swagger validation, renderers and api-doc views.

    :type config: :class:`pyramid.config.Configurator`
    """
    settings = config.registry.settings
    swagger_versions = get_swagger_versions(settings)

    # for rendering /swagger.yaml
    config.add_renderer(
        'yaml', 'pyramid_swagger.api.YamlRendererFactory',
    )

    # Add the SwaggerSchema to settings to make it available to the validation
    # tween and `register_api_doc_endpoints`. Stored under two keys so that
    # swagger 1.2 and 2.0 can co-exist.
    settings['pyramid_swagger.schema12'] = None
    settings['pyramid_swagger.schema20'] = None
    if SWAGGER_12 in swagger_versions:
        settings['pyramid_swagger.schema12'] = get_swagger_schema(settings)
    if SWAGGER_20 in swagger_versions:
        settings['pyramid_swagger.schema20'] = get_swagger_spec(settings)

    config.add_tween(
        "pyramid_swagger.tween.validation_tween_factory",
        under=pyramid.tweens.EXCVIEW
    )
    config.add_renderer('pyramid_swagger', PyramidSwaggerRendererFactory())

    if settings.get('pyramid_swagger.enable_api_doc_views', True):
        if SWAGGER_12 in swagger_versions:
            register_api_doc_endpoints(
                config,
                settings['pyramid_swagger.schema12'].get_api_doc_endpoints())
        if SWAGGER_20 in swagger_versions:
            register_api_doc_endpoints(
                config,
                build_swagger_20_swagger_schema_views(config),
                base_path=settings.get('pyramid_swagger.base_path_api_docs', ''))
def validate_swagger_schema(schema_dir, resource_listing):
    """Validate the structure of a Swagger v1.2 schema against the spec.

    Note: resource_listing may reference api-declaration files that live in
    schema_dir; the file URL is passed so the validator can fetch them.

    :param schema_dir: path to the Swagger spec directory
    :type schema_dir: string
    :param resource_listing: Swagger Spec v1.2 resource listing
    :type resource_listing: dict
    :raises: :py:class:`swagger_spec_validator.SwaggerValidationError`
    """
    schema_filepath = os.path.join(schema_dir, API_DOCS_FILENAME)
    schema_url = urlparse.urljoin(
        'file:', pathname2url(os.path.abspath(schema_filepath)))
    swagger_spec_validator.validator12.validate_spec(resource_listing, schema_url)
def build_param_schema(schema, param_type):
    """Turn a swagger endpoint schema into a jsonschema to validate our request.

    For example a swagger query parameter::

        {"paramType": "query", "name": "query", "type": "string", "required": true}

    becomes one entry of ``properties`` in an ``object`` jsonschema that can be
    validated against ``dict(request.params)``.

    :param schema: swagger endpoint schema
    :param param_type: str: swagger paramType to extract ('query', 'header', ...)
    :return: dict jsonschema, or None when no parameters of that type exist
    """
    properties = filter_params_by_type(schema, param_type)
    if not properties:
        return None
    # jsonschema describing the set of all parameters of this type.
    return {
        'type': 'object',
        'properties': {prop['name']: prop for prop in properties},
        # Allow extra headers: most HTTP requests carry headers outside the
        # scope of the spec (like `Host` or `User-Agent`).
        'additionalProperties': param_type == 'header',
    }
def type_validator(validator, types, instance, schema):
    """Validate 'type', skipping swagger 1.2 'File' parameters.

    Swagger 1.2 supports parameters of 'type': 'File', which jsonschema does
    not know about, so validation of the 'type' field is skipped in that case.
    """
    if schema.get('type') == 'File':
        return []
    return _validators.type_draft3(validator, types, instance, schema)
def required_validator(validator, req, instance, schema):
    """Validate 'required', honoring swagger 1.2's two meanings.

    Swagger 1.2 expects `required` to be a bool in a Parameter object but a
    list of property names in a Model object; the presence of paramType marks
    a Parameter, everything else falls through to draft4 semantics.
    """
    if schema.get('paramType'):
        # NOTE(review): `not instance` also treats falsy values (0, "", False)
        # as missing — confirm that is intended for required parameters.
        if req is True and not instance:
            return [ValidationError("%s is required" % schema['name'])]
        return []
    return _validators.required_draft4(validator, req, instance, schema)
def get_body_validator(models):
    """Build a request-body validator based on Draft4Validator.

    Extends :class:`jsonschema.validators.Draft4Validator` with validators for
    swagger's extensions to jsonschema.

    :param models: a mapping of reference to models
    :returns: a :class:`jsonschema.validators.Validator` which can validate
        the request body
    """
    swagger_overrides = {
        'paramType': ignore,
        'name': ignore,
        'type': build_swagger_type_validator(models),
        'required': required_validator,
    }
    return validators.extend(Draft4Validator, swagger_overrides)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.