Dataset columns (string length ranges as reported by the dataset viewer):

    code       string, lengths 26 to 870k
    docstring  string, lengths 1 to 65.6k
    func_name  string, lengths 1 to 194
    language   string, 1 class
    repo       string, lengths 8 to 68
    path       string, lengths 5 to 194
    url        string, lengths 46 to 254
    license    string, 4 classes
def normalize_from_url(self, path):
    """
    Normalize from_url to be used for matching.

    Normalize the path to always start with one slash, and end without a
    slash, so we can match both, with and without a trailing slash.
    """
    path = path.rstrip("/")
    path = "/" + path.lstrip("/")
    return path
Normalize from_url to be used for matching. Normalize the path to always start with one slash, and end without a slash, so we can match both, with and without a trailing slash.
normalize_from_url
python
readthedocs/readthedocs.org
readthedocs/redirects/models.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/redirects/models.py
MIT
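For intuition, here is the same normalization exercised standalone (the method body lifted into a free function, an assumption made only to keep the example self-contained):

    def normalize_from_url(path):
        path = path.rstrip("/")           # drop any trailing slash
        path = "/" + path.lstrip("/")     # force exactly one leading slash
        return path

    assert normalize_from_url("docs/") == "/docs"
    assert normalize_from_url("/docs/") == "/docs"
    assert normalize_from_url("docs") == "/docs"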
def normalize_to_url(self, path):
    """
    Normalize to_url to be used for redirecting.

    Normalize the path to always start with one slash, if the path is not
    an absolute URL. Otherwise, return the path as is.
    """
    if re.match("^https?://", path):
        return path
    path = "/" + path.lstrip("/")
    return path
Normalize to_url to be used for redirecting. Normalize the path to always start with one slash, if the path is not an absolute URL. Otherwise, return the path as is.
normalize_to_url
python
readthedocs/readthedocs.org
readthedocs/redirects/models.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/redirects/models.py
MIT
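And the companion normalization as a standalone sketch (only the standard library's re module is needed):

    import re

    def normalize_to_url(path):
        # Absolute URLs are returned untouched; relative paths get one leading slash.
        if re.match("^https?://", path):
            return path
        return "/" + path.lstrip("/")

    assert normalize_to_url("https://example.com/page") == "https://example.com/page"
    assert normalize_to_url("en/latest/") == "/en/latest/"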
def redirects_to_external_domain(self):
    """Check if the redirect is to an external domain."""
    return bool(re.match("^https?://", self.to_url))
Check if the redirect is to an external domain.
redirects_to_external_domain
python
readthedocs/readthedocs.org
readthedocs/redirects/models.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/redirects/models.py
MIT
def get_full_path(self, filename, language=None, version_slug=None, allow_crossdomain=False):
    """
    Return a full path for a given filename.

    This will include version and language information. No protocol/domain
    is returned.
    """
    # Handle explicit http redirects
    if allow_crossdomain and re.match("^https?://", filename):
        return filename

    return Resolver().resolve_path(
        project=self.project,
        language=language,
        version_slug=version_slug,
        filename=filename,
    )
Return a full path for a given filename. This will include version and language information. No protocol/domain is returned.
get_full_path
python
readthedocs/readthedocs.org
readthedocs/redirects/models.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/redirects/models.py
MIT
def get_redirect_path(self, filename, path=None, language=None, version_slug=None):
    """
    Resolve the redirect for the given filename.

    .. note::

       This method doesn't check if the current path matches ``from_url``,
       that should be done before calling this method using
       ``Redirect.objects.get_matching_redirect_with_path``.

    :param filename: The filename being served.
    :param path: The whole path from the request.
    :param language: The language of the project.
    :param version_slug: The slug of the current version.
    """
    method = getattr(
        self,
        "redirect_{type}".format(
            type=self.redirect_type,
        ),
    )
    return method(filename=filename, path=path, language=language, version_slug=version_slug)
Resolve the redirect for the given filename. .. note:: This method doesn't check if the current path matches ``from_url``, that should be done before calling this method using ``Redirect.objects.get_matching_redirect_with_path``. :param filename: The filename being served. :param path: The whole path from the request. :param language: The language of the project. :param version_slug: The slug of the current version.
get_redirect_path
python
readthedocs/readthedocs.org
readthedocs/redirects/models.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/redirects/models.py
MIT
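The getattr-based dispatch above is a common Django-model pattern: the handler name is derived from a field value. A minimal sketch with hypothetical handler names (not the real Redirect handlers):

    class Dispatcher:
        def dispatch(self, redirect_type, **kwargs):
            # Build the method name from the type value, exactly like
            # "redirect_{type}".format(type=self.redirect_type) above.
            method = getattr(self, f"redirect_{redirect_type}")
            return method(**kwargs)

        def redirect_exact(self, filename, **kwargs):
            return f"/exact/{filename}"

    print(Dispatcher().dispatch("exact", filename="index.html"))  # /exact/index.html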
def _will_cause_infinite_redirect(self, current_path):
    """
    Check if this redirect will cause an infinite redirect for the given path.

    We detect infinite redirects of the form:

        /dir/* -> /dir/subdir/:splat

    For example, /dir/test.html will redirect to /dir/subdir/test.html,
    and if the file doesn't exist, it will redirect to
    /dir/subdir/subdir/test.html and then to
    /dir/subdir/subdir/subdir/test.html and so on.

    We do this by checking if we will redirect to a subdirectory of the
    current path, and if the current path already starts with the path we
    will redirect to.
    """
    if self.from_url.endswith("*") and ":splat" in self.to_url:
        to_url_without_splat = self.to_url.split(":splat", maxsplit=1)[0]
        redirects_to_subpath = to_url_without_splat.startswith(self.from_url_without_rest)
        if redirects_to_subpath and current_path.startswith(to_url_without_splat):
            return True
    return False
Check if this redirect will cause an infinite redirect for the given path. We detect infinite redirects of the form: /dir/* -> /dir/subdir/:splat For example, /dir/test.html will redirect to /dir/subdir/test.html, and if the file doesn't exist, it will redirect to /dir/subdir/subdir/test.html and then to /dir/subdir/subdir/subdir/test.html and so on. We do this by checking if we will redirect to a subdirectory of the current path, and if the current path already starts with the path we will redirect to.
_will_cause_infinite_redirect
python
readthedocs/readthedocs.org
readthedocs/redirects/models.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/redirects/models.py
MIT
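The loop-detection rule can be checked in isolation. A sketch, assuming ``from_url_without_rest`` is simply ``from_url`` with the trailing "*" stripped (the denormalized field the model stores):

    def will_loop(from_url, to_url, current_path):
        if from_url.endswith("*") and ":splat" in to_url:
            to_url_without_splat = to_url.split(":splat", maxsplit=1)[0]
            from_url_without_rest = from_url.rstrip("*")
            # Loops once the rule points under itself and the path is already there.
            if to_url_without_splat.startswith(from_url_without_rest) and current_path.startswith(to_url_without_splat):
                return True
        return False

    assert not will_loop("/dir/*", "/dir/subdir/:splat", "/dir/test.html")
    assert will_loop("/dir/*", "/dir/subdir/:splat", "/dir/subdir/test.html")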
def forward(apps, schema_editor):
    """Calculate new ``from_url_without_rest`` attribute."""
    Redirect = apps.get_model("redirects", "Redirect")
    queryset = Redirect.objects.filter(
        redirect_type="exact",
        from_url__endswith="$rest",
    )
    for redirect in queryset:
        redirect.from_url_without_rest = redirect.from_url.replace("$rest", "") or None
        redirect.save()
Calculate new ``from_url_without_rest`` attribute.
forward
python
readthedocs/readthedocs.org
readthedocs/redirects/migrations/0004_denormalize-from-url.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/redirects/migrations/0004_denormalize-from-url.py
MIT
def forwards_func(apps, schema_editor):
    """
    Migrate redirects to the new modeling.

    Migrating the syntax of redirects is done outside the migration,
    since models from migrations don't have access to some methods
    required for the migration.
    """
    Redirect = apps.get_model("redirects", "Redirect")
    Project = apps.get_model("projects", "Project")

    # Enable all redirects.
    Redirect.objects.filter(enabled=None).update(enabled=True)

    # Rename Sphinx redirects.
    Redirect.objects.filter(redirect_type="sphinx_html").update(redirect_type="clean_url_to_html")
    Redirect.objects.filter(redirect_type="sphinx_htmldir").update(
        redirect_type="html_to_clean_url"
    )

    # Set positions with the same order as updated_dt.
    for project in Project.objects.filter(redirects__isnull=False).distinct():
        for i, redirect_pk in enumerate(
            project.redirects.order_by("-update_dt").values_list("pk", flat=True).all()
        ):
            Redirect.objects.filter(pk=redirect_pk).update(position=i)
Migrate redirects to the new modeling. Migrating the syntax of redirects is done outside the migration, since models from migrations don't have access to some methods required for the migration.
forwards_func
python
readthedocs/readthedocs.org
readthedocs/redirects/migrations/0007_migrate_to_new_syntax.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/redirects/migrations/0007_migrate_to_new_syntax.py
MIT
def lowercase(d):
    """Convert all dictionary keys to lowercase."""
    return {k.lower(): i for k, i in d.items()}
Convert all dictionary keys to lowercase.
_safe_json_loads.lowercase
python
readthedocs/readthedocs.org
readthedocs/telemetry/collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/collectors.py
MIT
def _safe_json_loads(content, default=None):
    def lowercase(d):
        """Convert all dictionary keys to lowercase."""
        return {k.lower(): i for k, i in d.items()}

    # pylint: disable=broad-except
    try:
        # Use ``object_hook`` parameter to lowercase all the keys of the dictionary.
        # This helps us to have our data normalized and improve queries.
        return json.loads(content, object_hook=lowercase)
    except Exception:
        log.info(
            "Error while loading JSON content.",
            exc_info=True,
        )
        return default
Convert all dictionary keys to lowercase.
_safe_json_loads
python
readthedocs/readthedocs.org
readthedocs/telemetry/collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/collectors.py
MIT
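The object_hook trick is worth seeing on its own: json.loads applies the hook to every decoded JSON object, so nested keys are lowercased too. A self-contained example:

    import json

    def lowercase(d):
        return {k.lower(): v for k, v in d.items()}

    raw = '{"Name": "sphinx", "Meta": {"Channel": "conda-forge"}}'
    print(json.loads(raw, object_hook=lowercase))
    # {'name': 'sphinx', 'meta': {'channel': 'conda-forge'}}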
def collect(self):
    """
    Collect all relevant data from the running build.

    Data that can be extracted from the database (project/organization)
    isn't collected here.
    """
    # NOTE: we could run each command inside a try/except block to have a
    # more granular protection and be able to save data from those commands
    # that didn't fail. Otherwise, if one command fails, all the data for
    # this Build is lost.
    data = {}
    data["config"] = {"user": self.config.source_config}
    data["os"] = self._get_operating_system()
    data["python"] = self._get_python_version()

    user_apt_packages, all_apt_packages = self._get_apt_packages()
    conda_packages = self._get_all_conda_packages() if self.config.is_using_conda else {}
    data["packages"] = {
        "pip": {
            "user": self._get_user_pip_packages(),
            "all": self._get_all_pip_packages(),
        },
        "conda": {
            "all": conda_packages,
        },
        "apt": {
            "user": user_apt_packages,
            "all": all_apt_packages,
        },
    }
    data["doctool"] = self._get_doctool()
    return data
Collect all relevant data from the running build. Data that can be extracted from the database (project/organization) isn't collected here.
collect
python
readthedocs/readthedocs.org
readthedocs/telemetry/collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/collectors.py
MIT
def _get_all_conda_packages(self):
    """
    Get all the packages installed by the user using conda.

    This includes top level and transitive dependencies.
    The output of ``conda list`` is in the form of::

        [
            {
                "base_url": "https://conda.anaconda.org/conda-forge",
                "build_number": 0,
                "build_string": "py_0",
                "channel": "conda-forge",
                "dist_name": "alabaster-0.7.12-py_0",
                "name": "alabaster",
                "platform": "noarch",
                "version": "0.7.12"
            },
            {
                "base_url": "https://conda.anaconda.org/conda-forge",
                "build_number": 0,
                "build_string": "pyh9f0ad1d_0",
                "channel": "conda-forge",
                "dist_name": "asn1crypto-1.4.0-pyh9f0ad1d_0",
                "name": "asn1crypto",
                "platform": "noarch",
                "version": "1.4.0"
            }
        ]
    """
    code, stdout, _ = self.run("conda", "list", "--json", "--name", self.version.slug)
    if code == 0 and stdout:
        packages = self._safe_json_loads(stdout, [])
        packages = [
            {
                "name": package["name"],
                "channel": package["channel"],
                "version": package["version"],
            }
            for package in packages
        ]
        return packages
    return []
Get all the packages installed by the user using conda. This includes top level and transitive dependencies. The output of ``conda list`` is in the form of:: [ { "base_url": "https://conda.anaconda.org/conda-forge", "build_number": 0, "build_string": "py_0", "channel": "conda-forge", "dist_name": "alabaster-0.7.12-py_0", "name": "alabaster", "platform": "noarch", "version": "0.7.12" }, { "base_url": "https://conda.anaconda.org/conda-forge", "build_number": 0, "build_string": "pyh9f0ad1d_0", "channel": "conda-forge", "dist_name": "asn1crypto-1.4.0-pyh9f0ad1d_0", "name": "asn1crypto", "platform": "noarch", "version": "1.4.0" } ]
_get_all_conda_packages
python
readthedocs/readthedocs.org
readthedocs/telemetry/collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/collectors.py
MIT
def _get_user_pip_packages(self):
    """
    Get all the packages to be installed defined by the user.

    It parses all the requirements files specified in the config file by
    the user (python.install.requirements) using ``dparse``, a third-party
    package.

    If the version of the package is explicit (==) it saves that particular
    version. Otherwise, if it's not defined, it saves ``undefined`` and if
    it's a non-deterministic operation (like >=, <= or ~=) it saves
    ``unknown`` in the version.
    """
    results = []
    # pylint: disable=too-many-nested-blocks
    for install in self.config.python.install:
        if isinstance(install, PythonInstallRequirements):
            if install.requirements:
                cmd = ["cat", install.requirements]
                _, stdout, _ = self.run(*cmd, cwd=self.checkout_path)
                df = dparse.parse(
                    stdout, file_type=dparse.filetypes.requirements_txt
                ).serialize()
                dependencies = df.get("dependencies", [])
                for requirement in dependencies:
                    name = requirement.get("name", "").lower()
                    if not name:
                        continue

                    # If the user defines a specific version in the
                    # requirements file, we save it. Otherwise, we don't,
                    # because we don't know which version will be
                    # installed.
                    version = "undefined"
                    specs = str(requirement.get("specs", ""))
                    if specs:
                        if specs.startswith("=="):
                            version = specs.replace("==", "", 1)
                        else:
                            version = "unknown"

                    results.append(
                        {
                            "name": name,
                            "version": version,
                        }
                    )
    return results
Get all the packages to be installed defined by the user. It parses all the requirements files specified in the config file by the user (python.install.requirements) using ``dparse``, a third-party package. If the version of the package is explicit (==) it saves that particular version. Otherwise, if it's not defined, it saves ``undefined`` and if it's a non-deterministic operation (like >=, <= or ~=) it saves ``unknown`` in the version.
_get_user_pip_packages
python
readthedocs/readthedocs.org
readthedocs/telemetry/collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/collectors.py
MIT
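The version-classification rule described in the docstring reduces to a small decision table. A sketch using plain string checks instead of dparse output, an assumption made to keep it standalone:

    def classify_specs(specs):
        if not specs:
            return "undefined"                 # no version pin at all
        if specs.startswith("=="):
            return specs.replace("==", "", 1)  # exact pin: keep the version
        return "unknown"                       # >=, <=, ~= are non-deterministic

    assert classify_specs("==1.8.0") == "1.8.0"
    assert classify_specs(">=5") == "unknown"
    assert classify_specs("") == "undefined"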
def _get_all_pip_packages(self):
    """
    Get all the packages installed by pip.

    This includes top level and transitive dependencies.
    The output of ``pip list`` is in the form of::

        [
            {
                "name": "requests-mock",
                "version": "1.8.0"
            },
            {
                "name": "requests-toolbelt",
                "version": "0.9.1"
            },
            {
                "name": "rstcheck",
                "version": "3.3.1"
            },
            {
                "name": "selectolax",
                "version": "0.2.10"
            },
            {
                "name": "slumber",
                "version": "0.7.1"
            }
        ]
    """
    cmd = [
        "python",
        "-m",
        "pip",
        "list",
        "--pre",
        "--local",
        "--format",
        "json",
    ]
    code, stdout, _ = self.run(*cmd)
    if code == 0 and stdout:
        return self._safe_json_loads(stdout, [])
    return []
Get all the packages installed by pip. This includes top level and transitive dependencies. The output of ``pip list`` is in the form of:: [ { "name": "requests-mock", "version": "1.8.0" }, { "name": "requests-toolbelt", "version": "0.9.1" }, { "name": "rstcheck", "version": "3.3.1" }, { "name": "selectolax", "version": "0.2.10" }, { "name": "slumber", "version": "0.7.1" } ]
_get_all_pip_packages
python
readthedocs/readthedocs.org
readthedocs/telemetry/collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/collectors.py
MIT
def _get_operating_system(self):
    """
    Get the current operating system.

    The output of ``lsb_release --description`` is in the form of::

        Description:    Ubuntu 20.04.3 LTS
    """
    code, stdout, _ = self.run("lsb_release", "--description")
    stdout = stdout.strip()
    if code == 0 and stdout:
        parts = stdout.split("\t")
        if len(parts) == 2:
            return parts[1]
    return ""
Get the current operating system. The output of ``lsb_release --description`` is in the form of:: Description: Ubuntu 20.04.3 LTS
_get_operating_system
python
readthedocs/readthedocs.org
readthedocs/telemetry/collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/collectors.py
MIT
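The parsing step in isolation: lsb_release separates the "Description:" label from the value with a tab, so a single split recovers the OS string:

    stdout = "Description:\tUbuntu 20.04.3 LTS"
    parts = stdout.strip().split("\t")
    print(parts[1] if len(parts) == 2 else "")  # Ubuntu 20.04.3 LTS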
def _get_apt_packages(self):
    """
    Get the list of installed apt packages (global and from the user).

    The current source of user installed packages is the config file,
    but we have only the name, so we take the version from the list of
    all installed packages.
    """
    all_apt_packages = self._get_all_apt_packages()
    all_apt_packages_dict = {
        package["name"]: package["version"] for package in all_apt_packages
    }
    user_apt_packages = self._get_user_apt_packages()
    for package in user_apt_packages:
        package["version"] = all_apt_packages_dict.get(package["name"], "")
    return user_apt_packages, all_apt_packages
Get the list of installed apt packages (global and from the user). The current source of user installed packages is the config file, but we have only the name, so we take the version from the list of all installed packages.
_get_apt_packages
python
readthedocs/readthedocs.org
readthedocs/telemetry/collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/collectors.py
MIT
def _get_all_apt_packages(self):
    """
    Get all installed apt packages and their versions.

    The output of ``dpkg-query --show`` is in the form of::

        adduser 3.116ubuntu1
        apt 1.6.14
        base-files 10.1ubuntu2.11
        base-passwd 3.5.44
        bash 4.4.18-2ubuntu1.2
        bsdutils 1:2.31.1-0.4ubuntu3.7
        bzip2 1.0.6-8.1ubuntu0.2
        coreutils 8.28-1ubuntu1
        dash 0.5.8-2.10
        debconf 1.5.66ubuntu1
        debianutils 4.8.4
        diffutils 1:3.6-1
        dpkg 1.19.0.5ubuntu2.3
        e2fsprogs 1.44.1-1ubuntu1.3
        fdisk 2.31.1-0.4ubuntu3.7
        findutils 4.6.0+git+20170828-2
        gcc-8-base 8.4.0-1ubuntu1~18.04
        gpgv 2.2.4-1ubuntu1.4
        grep 3.1-2build1
        gzip 1.6-5ubuntu1.2
        hostname 3.20
    """
    code, stdout, _ = self.run(
        "dpkg-query", "--showformat", "${package} ${version}\\n", "--show"
    )
    stdout = stdout.strip()
    packages = []
    if code != 0 or not stdout:
        return packages

    for line in stdout.split("\n"):
        parts = line.split()
        if len(parts) == 2:
            package, version = parts
            packages.append(
                {
                    "name": package.lower(),
                    "version": version,
                }
            )

    return packages
Get all installed apt packages and their versions. The output of ``dpkg-query --show`` is in the form of:: adduser 3.116ubuntu1 apt 1.6.14 base-files 10.1ubuntu2.11 base-passwd 3.5.44 bash 4.4.18-2ubuntu1.2 bsdutils 1:2.31.1-0.4ubuntu3.7 bzip2 1.0.6-8.1ubuntu0.2 coreutils 8.28-1ubuntu1 dash 0.5.8-2.10 debconf 1.5.66ubuntu1 debianutils 4.8.4 diffutils 1:3.6-1 dpkg 1.19.0.5ubuntu2.3 e2fsprogs 1.44.1-1ubuntu1.3 fdisk 2.31.1-0.4ubuntu3.7 findutils 4.6.0+git+20170828-2 gcc-8-base 8.4.0-1ubuntu1~18.04 gpgv 2.2.4-1ubuntu1.4 grep 3.1-2build1 gzip 1.6-5ubuntu1.2 hostname 3.20
_get_all_apt_packages
python
readthedocs/readthedocs.org
readthedocs/telemetry/collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/collectors.py
MIT
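The dpkg-query parsing loop can be exercised against a small fixture, mirroring the test further down in this dump:

    stdout = "adduser 3.116ubuntu1\nAPT 1.6.14"
    packages = []
    for line in stdout.strip().split("\n"):
        parts = line.split()
        if len(parts) == 2:
            name, version = parts
            # Names are lowercased so they match the user-package lookup.
            packages.append({"name": name.lower(), "version": version})
    print(packages)
    # [{'name': 'adduser', 'version': '3.116ubuntu1'}, {'name': 'apt', 'version': '1.6.14'}]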
def _get_python_version(self):
    """
    Get the Python version currently used.

    The output of ``python --version`` is in the form of::

        Python 3.8.12
    """
    code, stdout, _ = self.run("python", "--version")
    stdout = stdout.strip()
    if code == 0 and stdout:
        parts = stdout.split()
        if len(parts) == 2:
            return parts[1]
    return ""
Get the Python version currently used. The output of ``python --version`` is in the form of:: Python 3.8.12
_get_python_version
python
readthedocs/readthedocs.org
readthedocs/telemetry/collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/collectors.py
MIT
def save_build_data(build_id, data):
    """
    Save the build data asynchronously.

    Mainly used from the builders, since they don't have access to the
    database.
    """
    build = Build.objects.filter(id=build_id).first()
    if build:
        BuildData.objects.collect(build, data)
Save the build data asynchronously. Mainly used from the builders, since they don't have access to the database.
save_build_data
python
readthedocs/readthedocs.org
readthedocs/telemetry/tasks.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/tasks.py
MIT
def delete_old_build_data():
    """
    Delete BuildData models older than ``RTD_TELEMETRY_DATA_RETENTION_DAYS``.

    This is intended to run from a periodic task daily.

    NOTE: the logic of this task could be improved to keep data we care
    about for longer (e.g. active projects) and remove data we don't
    (e.g. builds from spam projects).
    """
    retention_days = settings.RTD_TELEMETRY_DATA_RETENTION_DAYS
    days_ago = timezone.now().date() - timezone.timedelta(days=retention_days)
    # NOTE: we are using raw SQL here to avoid Django doing a SELECT first to
    # send `pre_` and `post_` delete signals.
    # See https://docs.djangoproject.com/en/4.2/ref/models/querysets/#delete
    with connections["telemetry"].cursor() as cursor:
        cursor.execute(
            # "SELECT COUNT(*) FROM telemetry_builddata WHERE created BETWEEN %s AND %s",
            "DELETE FROM telemetry_builddata WHERE created BETWEEN %s AND %s",
            [
                days_ago - timezone.timedelta(days=90),
                days_ago,
            ],
        )
Delete BuildData models older than ``RTD_TELEMETRY_DATA_RETENTION_DAYS``. This is intended to run from a periodic task daily. NOTE: the logic of this task could be improved to keep data we care about for longer (e.g. active projects) and remove data we don't (e.g. builds from spam projects).
delete_old_build_data
python
readthedocs/readthedocs.org
readthedocs/telemetry/tasks.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/tasks.py
MIT
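The BETWEEN window used by the DELETE is easy to misread; it removes rows from 90 days before the retention cutoff up to the cutoff itself. A sketch with hypothetical dates and a hypothetical retention setting:

    from datetime import date, timedelta

    retention_days = 30  # hypothetical value of RTD_TELEMETRY_DATA_RETENTION_DAYS
    days_ago = date(2024, 1, 31) - timedelta(days=retention_days)
    window = (days_ago - timedelta(days=90), days_ago)
    print(window)  # (datetime.date(2023, 10, 3), datetime.date(2024, 1, 1))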
def collect(self, build, data):
    """
    Save the collected information from a build.

    We fill other fields from data we have access to before saving it,
    like the project, version, organization, etc.

    The final JSON structure should look like:

    .. code-block:: json

       {
           "os": "ubuntu-18.04.5",
           "python": "3.10.2",
           "organization": {
               "id": 1,
               "slug": "org"
           },
           "project": {
               "id": 2,
               "slug": "docs"
           },
           "version": {
               "id": 1,
               "slug": "latest"
           },
           "build": {
               "id": 3,
               "start": "2021-04-20-...",  # Date in isoformat
               "length": "600",  # Build length in seconds
               "commit": "abcd1234",
               "success": true
           },
           "config": {
               "user": {},
               "final": {}
           },
           "packages": {
               "pip": {
                   "user": [
                       {
                           "name": "sphinx",
                           "version": "3.4.5"
                       }
                   ],
                   "all": [
                       {
                           "name": "sphinx",
                           "version": "3.4.5"
                       }
                   ]
               },
               "conda": {
                   "all": [
                       {
                           "name": "sphinx",
                           "channel": "conda-forge",
                           "version": "0.1"
                       }
                   ]
               },
               "apt": {
                   "user": [
                       {
                           "name": "python3-dev",
                           "version": "3.8.2-0ubuntu2"
                       }
                   ],
                   "all": [
                       {
                           "name": "python3-dev",
                           "version": "3.8.2-0ubuntu2"
                       }
                   ]
               }
           }
       }
    """
    data["build"] = {
        "id": build.id,
        "start": build.date.isoformat(),
        "length": build.length,
        "commit": build.commit,
        "success": build.success,
    }
    data["project"] = {"id": build.project.id, "slug": build.project.slug}
    data["version"] = {
        "id": build.version.id,
        "slug": build.version.slug,
    }

    org = build.project.organizations.first()
    if org:
        data["organization"] = {
            "id": org.id,
            "slug": org.slug,
        }

    data["config"]["final"] = build.config
    return self.create(data=data)
Save the collected information from a build. We fill other fields from data we have access to before saving it, like the project, version, organization, etc. The final JSON structure should look like: .. code-block:: json { "os": "ubuntu-18.04.5", "python": "3.10.2", "organization": { "id": 1, "slug": "org" }, "project": { "id": 2, "slug": "docs" }, "version": { "id": 1, "slug": "latest" }, "build": { "id": 3, "start": "2021-04-20-...", # Date in isoformat "length": "600", # Build length in seconds "commit": "abcd1234", "success": true }, "config": { "user": {}, "final": {} }, "packages": { "pip": { "user": [ { "name": "sphinx", "version": "3.4.5" } ], "all": [ { "name": "sphinx", "version": "3.4.5" } ] }, "conda": { "all": [ { "name": "sphinx", "channel": "conda-forge", "version": "0.1" } ] }, "apt": { "user": [ { "name": "python3-dev", "version": "3.8.2-0ubuntu2" } ], "all": [ { "name": "python3-dev", "version": "3.8.2-0ubuntu2" } ] } } }
collect
python
readthedocs/readthedocs.org
readthedocs/telemetry/models.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/models.py
MIT
def test_get_all_conda_packages(self, run):
    out = dedent(
        """
        [
            {
                "base_url": "https://conda.anaconda.org/conda-forge",
                "build_number": 0,
                "build_string": "py_0",
                "channel": "conda-forge",
                "dist_name": "alabaster-0.7.12-py_0",
                "name": "alabaster",
                "platform": "noarch",
                "version": "0.7.12"
            },
            {
                "base_url": "https://conda.anaconda.org/conda-forge",
                "build_number": 0,
                "build_string": "pyh9f0ad1d_0",
                "channel": "conda-forge",
                "dist_name": "asn1crypto-1.4.0-pyh9f0ad1d_0",
                "name": "asn1crypto",
                "platform": "noarch",
                "version": "1.4.0"
            }
        ]
        """
    )
    run.return_value = (0, out, "")
    self.assertEqual(
        self.collector._get_all_conda_packages(),
        [
            {
                "name": "alabaster",
                "channel": "conda-forge",
                "version": "0.7.12",
            },
            {
                "name": "asn1crypto",
                "channel": "conda-forge",
                "version": "1.4.0",
            },
        ],
    )
[ { "base_url": "https://conda.anaconda.org/conda-forge", "build_number": 0, "build_string": "py_0", "channel": "conda-forge", "dist_name": "alabaster-0.7.12-py_0", "name": "alabaster", "platform": "noarch", "version": "0.7.12" }, { "base_url": "https://conda.anaconda.org/conda-forge", "build_number": 0, "build_string": "pyh9f0ad1d_0", "channel": "conda-forge", "dist_name": "asn1crypto-1.4.0-pyh9f0ad1d_0", "name": "asn1crypto", "platform": "noarch", "version": "1.4.0" } ]
test_get_all_conda_packages
python
readthedocs/readthedocs.org
readthedocs/telemetry/tests/test_collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/tests/test_collectors.py
MIT
def test_get_user_pip_packages(self, run):
    self.collector.config = get_build_config(
        {"python": {"install": [{"requirements": "docs/requirements.txt"}]}}
    )
    self.collector.config.validate()
    out = dedent(
        """
        requests-mock==1.8.0
        requests-toolbelt==0.9.1
        rstcheck==3.3.1
        Sphinx>=5  # >= specs
        requests  # no specs
        """
    )
    run.return_value = (0, out, "")
    self.assertEqual(
        self.collector._get_user_pip_packages(),
        [
            {"name": "requests-mock", "version": "1.8.0"},
            {"name": "requests-toolbelt", "version": "0.9.1"},
            {"name": "rstcheck", "version": "3.3.1"},
            {"name": "sphinx", "version": "unknown"},  # >= specs
            {"name": "requests", "version": "undefined"},  # no specs
        ],
    )
requests-mock==1.8.0 requests-toolbelt==0.9.1 rstcheck==3.3.1 Sphinx>=5 # >= specs requests # no specs
test_get_user_pip_packages
python
readthedocs/readthedocs.org
readthedocs/telemetry/tests/test_collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/tests/test_collectors.py
MIT
def test_get_all_pip_packages(self, run):
    out = dedent(
        """
        [
            {
                "name": "requests-mock",
                "version": "1.8.0"
            },
            {
                "name": "requests-toolbelt",
                "version": "0.9.1"
            },
            {
                "name": "rstcheck",
                "version": "3.3.1"
            }
        ]
        """
    )
    run.return_value = (0, out, "")
    self.assertEqual(
        self.collector._get_all_pip_packages(),
        [
            {"name": "requests-mock", "version": "1.8.0"},
            {"name": "requests-toolbelt", "version": "0.9.1"},
            {"name": "rstcheck", "version": "3.3.1"},
        ],
    )
[ { "name": "requests-mock", "version": "1.8.0" }, { "name": "requests-toolbelt", "version": "0.9.1" }, { "name": "rstcheck", "version": "3.3.1" } ]
test_get_all_pip_packages
python
readthedocs/readthedocs.org
readthedocs/telemetry/tests/test_collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/tests/test_collectors.py
MIT
def test_get_all_apt_packages(self, run):
    out = dedent(
        """
        apt 1.6.14
        base-files 10.1ubuntu2.11
        base-passwd 3.5.44
        bash 4.4.18-2ubuntu1.2
        bsdutils 1:2.31.1-0.4ubuntu3.7
        coreutils 8.28-1ubuntu1
        """
    )
    run.return_value = (0, out, "")
    self.assertEqual(
        self.collector._get_all_apt_packages(),
        [
            {
                "name": "apt",
                "version": "1.6.14",
            },
            {
                "name": "base-files",
                "version": "10.1ubuntu2.11",
            },
            {
                "name": "base-passwd",
                "version": "3.5.44",
            },
            {
                "name": "bash",
                "version": "4.4.18-2ubuntu1.2",
            },
            {
                "name": "bsdutils",
                "version": "1:2.31.1-0.4ubuntu3.7",
            },
            {
                "name": "coreutils",
                "version": "8.28-1ubuntu1",
            },
        ],
    )
apt 1.6.14 base-files 10.1ubuntu2.11 base-passwd 3.5.44 bash 4.4.18-2ubuntu1.2 bsdutils 1:2.31.1-0.4ubuntu3.7 coreutils 8.28-1ubuntu1
test_get_all_apt_packages
python
readthedocs/readthedocs.org
readthedocs/telemetry/tests/test_collectors.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/telemetry/tests/test_collectors.py
MIT
def _get_cache_tags(self):
    """
    Get cache tags for this view.

    .. warning::

       This method is run at the end of the request,
       so any exceptions like 404 should be caught.
    """
    try:
        project = self._get_project()
        version = self._get_version()
    except Exception:
        log.warning(
            "Error while retrieving project or version for this view.",
            exc_info=True,
        )
        return []

    tags = []
    if project:
        tags.append(project.slug)
    if project and version:
        tags.append(get_cache_tag(project.slug, version.slug))
    if project and self.project_cache_tag:
        tags.append(get_cache_tag(project.slug, self.project_cache_tag))
    return tags
Get cache tags for this view. .. warning:: This method is run at the end of the request, so any exceptions like 404 should be caught.
_get_cache_tags
python
readthedocs/readthedocs.org
readthedocs/api/mixins.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/mixins.py
MIT
def get_environment_variables(self, obj):
    """Get all environment variables, including public ones."""
    return {
        variable.name: {
            "value": variable.value,
            "public": variable.public,
        }
        for variable in obj.environmentvariable_set.all()
    }
Get all environment variables, including public ones.
get_environment_variables
python
readthedocs/readthedocs.org
readthedocs/api/v2/serializers.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/serializers.py
MIT
def get_skip(self, obj):
    """
    Override ``Project.skip`` to consider more cases for whether to skip a project.

    We rely on the manager's ``.is_active`` method here, which
    encapsulates all these possible cases.
    """
    return not Project.objects.is_active(obj)
Override ``Project.skip`` to consider more cases for whether to skip a project. We rely on the manager's ``.is_active`` method here, which encapsulates all these possible cases.
get_skip
python
readthedocs/readthedocs.org
readthedocs/api/v2/serializers.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/serializers.py
MIT
def _get_project_serialized(self, obj):
    """Get a serialized project from the cache or create a new one."""
    project = obj.project
    project_serialized = self._serialized_projects_cache.get(project.id)
    if project_serialized:
        return project_serialized
    self._serialized_projects_cache[project.id] = self.project_serializer_class(project)
    return self._serialized_projects_cache[project.id]
Get a serialized project from the cache or create a new one.
_get_project_serialized
python
readthedocs/readthedocs.org
readthedocs/api/v2/serializers.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/serializers.py
MIT
def sync_versions_to_db(project, versions, type):
    """
    Update the database with the current versions from the repository.

    - check if user has a ``stable`` / ``latest`` version and disable ours
    - update old versions with newer configs (identifier, type, machine)
    - create new versions that do not exist on DB (in bulk)
    - it does not delete versions

    :param project: project to update versions
    :param versions: list of VCSVersion fetched from the repository
    :param type: internal or external version
    :returns: set of versions' slug added
    """
    old_version_values = project.versions.filter(type=type).values_list(
        "verbose_name",
        "identifier",
    )
    old_versions = dict(old_version_values)

    # Add new versions
    versions_to_create = []
    added = set()
    has_user_stable = False
    has_user_latest = False
    for version in versions:
        version_id = version["identifier"]
        version_name = version["verbose_name"]
        if version_name == STABLE_VERBOSE_NAME:
            has_user_stable = True
            created_version, created = _set_or_create_version(
                project=project,
                slug=STABLE,
                version_id=version_id,
                verbose_name=version_name,
                type_=type,
            )
            if created:
                added.add(created_version.slug)
        elif version_name == LATEST_VERBOSE_NAME:
            has_user_latest = True
            created_version, created = _set_or_create_version(
                project=project,
                slug=LATEST,
                version_id=version_id,
                verbose_name=version_name,
                type_=type,
            )
            if created:
                added.add(created_version.slug)
        elif version_name in old_versions:
            if version_id == old_versions[version_name]:
                # Version is correct
                continue

            # Update slug with new identifier
            Version.objects.filter(
                project=project,
                verbose_name=version_name,
                # Always filter by type, a tag and a branch
                # can share the same verbose_name.
                type=type,
            ).update(
                identifier=version_id,
                machine=False,
            )
            log.info(
                "Re-syncing versions: version updated.",
                version_verbose_name=version_name,
                version_id=version_id,
            )
        else:
            # New Version
            versions_to_create.append((version_id, version_name))

    added.update(_create_versions(project, type, versions_to_create))

    if not has_user_stable:
        stable_version = project.versions.filter(slug=STABLE, type=type).first()
        if stable_version:
            # Put back the RTD's stable version
            stable_version.machine = True
            stable_version.save()
    if not has_user_latest:
        latest_version = project.versions.filter(slug=LATEST, type=type).first()
        if latest_version:
            # Put back the RTD's latest version
            latest_version.machine = True
            latest_version.save()

    if added:
        log.info(
            "Re-syncing versions: versions added.",
            count=len(added),
            versions=",".join(itertools.islice(added, 100)),
        )
    return added
Update the database with the current versions from the repository. - check if user has a ``stable`` / ``latest`` version and disable ours - update old versions with newer configs (identifier, type, machine) - create new versions that do not exist on DB (in bulk) - it does not delete versions :param project: project to update versions :param versions: list of VCSVersion fetched from the repository :param type: internal or external version :returns: set of versions' slug added
sync_versions_to_db
python
readthedocs/readthedocs.org
readthedocs/api/v2/utils.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/utils.py
MIT
def _create_versions(project, type, versions):
    """
    Create versions (tuple of version_id and version_name).

    Returns the slug of all added versions.

    .. note::

       ``Version.slug`` relies on the post_save signal,
       so we can't use bulk_create.
    """
    versions_objs = (
        Version(
            project=project,
            type=type,
            identifier=version_id,
            verbose_name=version_name,
        )
        for version_id, version_name in versions
    )
    added = set()
    for version in versions_objs:
        version.save()
        added.add(version.slug)
    return added
Create versions (tuple of version_id and version_name). Returns the slug of all added versions. .. note:: ``Version.slug`` relies on the post_save signal, so we can't use bulk_create.
_create_versions
python
readthedocs/readthedocs.org
readthedocs/api/v2/utils.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/utils.py
MIT
def _set_or_create_version(project, slug, version_id, verbose_name, type_):
    """Search or create a version and set its machine attribute to false."""
    version = project.versions.filter(slug=slug).first()
    if version:
        version.identifier = version_id
        version.machine = False
        version.type = type_
        version.save()
    else:
        created_version = Version.objects.create(
            project=project,
            type=type_,
            identifier=version_id,
            verbose_name=verbose_name,
        )
        return created_version, True
    return version, False
Search or create a version and set its machine attribute to false.
_set_or_create_version
python
readthedocs/readthedocs.org
readthedocs/api/v2/utils.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/utils.py
MIT
def delete_versions_from_db(project, tags_data, branches_data):
    """
    Delete all versions not in the current repo.

    :returns: The slug of the deleted versions from the database.
    """
    to_delete_qs = _get_deleted_versions_qs(
        project=project,
        tags_data=tags_data,
        branches_data=branches_data,
    ).exclude(active=True)

    _, deleted = to_delete_qs.delete()
    versions_count = deleted.get("builds.Version", 0)
    log.info(
        "Re-syncing versions: versions deleted.",
        project_slug=project.slug,
        count=versions_count,
    )
Delete all versions not in the current repo. :returns: The slug of the deleted versions from the database.
delete_versions_from_db
python
readthedocs/readthedocs.org
readthedocs/api/v2/utils.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/utils.py
MIT
def get_deleted_active_versions(project, tags_data, branches_data):
    """Return the slug of active versions that were deleted from the repository."""
    to_delete_qs = _get_deleted_versions_qs(
        project=project,
        tags_data=tags_data,
        branches_data=branches_data,
    ).filter(active=True)
    return set(to_delete_qs.values_list("slug", flat=True))
Return the slug of active versions that were deleted from the repository.
get_deleted_active_versions
python
readthedocs/readthedocs.org
readthedocs/api/v2/utils.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/utils.py
MIT
def run_automation_rules(project, added_versions, deleted_active_versions):
    """
    Runs the automation rules on each version.

    The rules are sorted by priority.

    :param added_versions: Slugs of versions that were added.
    :param deleted_active_versions: Slugs of active versions that
     were deleted from the repository.

    .. note::

       Currently the versions aren't sorted in any way,
       the same order is kept.
    """
    class_ = RegexAutomationRule
    actions = [
        (added_versions, class_.allowed_actions_on_create),
        (deleted_active_versions, class_.allowed_actions_on_delete),
    ]
    for versions_slug, allowed_actions in actions:
        versions = project.versions.filter(slug__in=versions_slug)
        rules = project.automation_rules.filter(action__in=allowed_actions)
        for version, rule in itertools.product(versions, rules):
            rule.run(version)
Runs the automation rules on each version. The rules are sorted by priority. :param added_versions: Slugs of versions that were added. :param deleted_active_versions: Slugs of active versions that were deleted from the repository. .. note:: Currently the versions aren't sorted in any way, the same order is kept.
run_automation_rules
python
readthedocs/readthedocs.org
readthedocs/api/v2/utils.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/utils.py
MIT
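itertools.product is what pairs every matched version with every applicable rule, preserving each queryset's own order. A toy run with hypothetical slugs and rule names:

    import itertools

    versions = ["v1", "v2"]
    rules = ["activate-version", "hide-version"]
    for version, rule in itertools.product(versions, rules):
        print(version, rule)
    # v1 activate-version
    # v1 hide-version
    # v2 activate-version
    # v2 hide-version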
def normalize_build_command(command, project_slug, version_slug):
    """
    Sanitize the build command to be shown to users.

    It removes internal variables and long paths to make them nicer.
    """
    docroot = settings.DOCROOT.rstrip("/")  # remove trailing '/'

    # Remove Docker hash from DOCROOT when running it locally.
    # DOCROOT contains the Docker container hash (e.g. b7703d1b5854).
    # We have to remove it from the DOCROOT itself since it changes each time
    # we spin up a new Docker instance locally.
    container_hash = "/"
    if settings.RTD_DOCKER_COMPOSE:
        docroot = re.sub("/[0-9a-z]+/?$", "", settings.DOCROOT, count=1)
        container_hash = "/([0-9a-z]+/)?"

    regex = f"{docroot}{container_hash}{project_slug}/envs/{version_slug}(/bin/)?"
    command = re.sub(regex, "", command, count=1)

    # Remove explicit variable names we use to run commands,
    # since users don't care about these.
    regex = r"^\$READTHEDOCS_VIRTUALENV_PATH/bin/"
    command = re.sub(regex, "", command, count=1)
    regex = r"^\$CONDA_ENVS_PATH/\$CONDA_DEFAULT_ENV/bin/"
    command = re.sub(regex, "", command, count=1)
    return command
Sanitize the build command to be shown to users. It removes internal variables and long paths to make them nicer.
normalize_build_command
python
readthedocs/readthedocs.org
readthedocs/api/v2/utils.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/utils.py
MIT
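What the first substitution strips is easiest to see with a concrete command. A sketch with a hypothetical docroot, project and version (settings swapped for plain variables):

    import re

    docroot = "/data/docroot"
    container_hash = "/([0-9a-z]+/)?"  # optional Docker container hash
    command = "/data/docroot/b7703d1b5854/myproject/envs/latest/bin/python -m sphinx"
    regex = f"{docroot}{container_hash}myproject/envs/latest(/bin/)?"
    print(re.sub(regex, "", command, count=1))  # python -m sphinx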
def dumps(self, data):
    """Used to be able to render datetime objects."""
    return JSONRenderer().render(data)
Used to be able to render datetime objects.
dumps
python
readthedocs/readthedocs.org
readthedocs/api/v2/client.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/client.py
MIT
def delete_old_revoked_build_api_keys(days=15):
    """
    Delete revoked and expired keys that are older than x days.

    We don't delete keys created in the last 15 days,
    to have some audit trail in case we need to investigate something.
    """
    created_before = timezone.now() - timedelta(days=days)
    to_delete = BuildAPIKey.objects.filter(revoked=True, created__lt=created_before)
    log.info("Deleting revoked keys", count=to_delete.count())
    to_delete.delete()

    to_delete = BuildAPIKey.objects.filter(
        expiry_date__lt=timezone.now(),
        created__lt=created_before,
    )
    log.info("Deleting expired keys", count=to_delete.count())
    to_delete.delete()
Delete revoked and expired keys that are older than x days. We don't delete keys created in the last 15 days, to have some audit trail in case we need to investigate something.
delete_old_revoked_build_api_keys
python
readthedocs/readthedocs.org
readthedocs/api/v2/tasks.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/tasks.py
MIT
def create_key(self, project):
    """
    Create a new API key for a project.

    Build API keys are valid for 3 hours,
    and can be revoked at any time by hitting the /api/v2/revoke/ endpoint.
    """
    expiry_date = timezone.now() + timedelta(hours=3)
    name_max_length = self.model._meta.get_field("name").max_length
    return super().create_key(
        # Name is required, so we use the project slug for it.
        name=project.slug[:name_max_length],
        expiry_date=expiry_date,
        project=project,
    )
Create a new API key for a project. Build API keys are valid for 3 hours, and can be revoked at any time by hitting the /api/v2/revoke/ endpoint.
create_key
python
readthedocs/readthedocs.org
readthedocs/api/v2/models.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/models.py
MIT
def get_queryset_for_api_key(self, api_key):
    """Queryset used when an API key is used in the request."""
    raise NotImplementedError
Queryset used when an API key is used in the request.
get_queryset_for_api_key
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/model_views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/model_views.py
MIT
def get_queryset(self):
    """
    Filter objects by user or API key.

    If an API key is present, we filter by the project associated with the key.
    Otherwise, we filter using our API manager method.

    With this we check if the user/API key is authorized to access the object.
    """
    api_key = getattr(self.request, "build_api_key", None)
    if api_key:
        return self.get_queryset_for_api_key(api_key)
    return self.model.objects.api_v2(self.request.user)
Filter objects by user or API key. If an API key is present, we filter by the project associated with the key. Otherwise, we filter using our API manager method. With this we check if the user/API key is authorized to access the object.
get_queryset
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/model_views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/model_views.py
MIT
def get_serializer_class(self):
    """
    Return the proper serializer for UI and Admin.

    This ViewSet has a slightly different pattern since we want to
    pre-process the `command` field before returning it to the user,
    and we also want to have a specific serializer for admins.
    """
    if self.request.build_api_key:
        # Logic copied from `UserSelectViewSet.get_serializer_class`
        # and extended to choose serializer from self.action
        if self.action not in ["list", "retrieve"]:
            return BuildAdminSerializer  # Staff write-only
        return BuildAdminReadOnlySerializer  # Staff read-only
    return BuildSerializer  # Non-staff
Return the proper serializer for UI and Admin. This ViewSet has a slightly different pattern since we want to pre-process the `command` field before returning it to the user, and we also want to have a specific serializer for admins.
get_serializer_class
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/model_views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/model_views.py
MIT
def retrieve(self, *args, **kwargs):
    """
    Retrieves command data from storage.

    This uses files from storage to get the JSON,
    and replaces the ``commands`` part of the response data.
    """
    if not settings.RTD_SAVE_BUILD_COMMANDS_TO_STORAGE:
        return super().retrieve(*args, **kwargs)

    instance = self.get_object()
    serializer = self.get_serializer(instance)
    data = serializer.data
    if instance.cold_storage:
        storage_path = "{date}/{id}.json".format(
            date=str(instance.date.date()),
            id=instance.id,
        )
        if build_commands_storage.exists(storage_path):
            try:
                json_resp = build_commands_storage.open(storage_path).read()
                data["commands"] = json.loads(json_resp)

                # Normalize commands in the same way as when returning
                # them using the serializer
                for buildcommand in data["commands"]:
                    buildcommand["command"] = normalize_build_command(
                        buildcommand["command"],
                        instance.project.slug,
                        instance.version.slug,
                    )
            except Exception:
                log.exception(
                    "Failed to read build data from storage.",
                    path=storage_path,
                )
    return Response(data)
Retrieves command data from storage. This uses files from storage to get the JSON, and replaces the ``commands`` part of the response data.
retrieve
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/model_views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/model_views.py
MIT
def reset(self, request, **kwargs):
    """Reset the build so it can be re-used when re-trying."""
    instance = self.get_object()
    instance.reset()
    return Response(status=status.HTTP_204_NO_CONTENT)
Reset the build so it can be re-used when re-trying.
reset
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/model_views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/model_views.py
MIT
def perform_create(self, serializer):
    """Restrict creation to builds attached to the project from the api key."""
    build_pk = serializer.validated_data["build"].pk
    build_api_key = self.request.build_api_key
    if not build_api_key.project.builds.filter(pk=build_pk).exists():
        raise PermissionDenied()

    if BuildCommandResult.objects.filter(
        build=serializer.validated_data["build"],
        start_time=serializer.validated_data["start_time"],
    ).exists():
        log.warning("Build command is duplicated. Skipping...")
        return

    return super().perform_create(serializer)
Restrict creation to builds attached to the project from the api key.
perform_create
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/model_views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/model_views.py
MIT
def perform_create(self, serializer):
    """Restrict creation to notifications attached to the project's builds from the api key."""
    attached_to = serializer.validated_data["attached_to"]
    build_api_key = self.request.build_api_key

    project_slug = None
    if isinstance(attached_to, Build):
        project_slug = attached_to.project.slug
    elif isinstance(attached_to, Project):
        project_slug = attached_to.slug

    # Limit the permissions to create a notification on this object only if the API key
    # is attached to the related project
    if not project_slug or build_api_key.project.slug != project_slug:
        raise PermissionDenied()

    return super().perform_create(serializer)
Restrict creation to notifications attached to the project's builds from the api key.
perform_create
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/model_views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/model_views.py
MIT
def post(self, request, project_slug):
    """Set up webhook post view with request and project objects."""
    self.request = request

    log.bind(
        project_slug=project_slug,
        integration_type=self.integration_type,
    )

    # WARNING: this is a hack to allow us access to `request.body` later.
    # Due to a limitation of DRF, we can't access `request.body`
    # after accessing `request.data`.
    # By accessing `request.body` we are able to access `request.body` and
    # `request.data` later without any problem (mostly black magic).
    # See #4940 for more background.
    self.request.body  # noqa

    self.project = None
    self.data = self.get_data()
    try:
        self.project = self.get_project(slug=project_slug)
        if not Project.objects.is_active(self.project):
            resp = {"detail": "This project is currently disabled"}
            return Response(resp, status=status.HTTP_406_NOT_ACCEPTABLE)
    except Project.DoesNotExist as exc:
        raise NotFound("Project not found") from exc

    # Webhooks without a secret are no longer permitted.
    # https://blog.readthedocs.com/security-update-on-incoming-webhooks/.
    if not self.has_secret():
        return Response(
            {"detail": self.missing_secret_deprecated_msg},
            status=HTTP_400_BAD_REQUEST,
        )

    if not self.is_payload_valid():
        log.warning("Invalid payload for project and integration.")
        return Response(
            {"detail": self.invalid_payload_msg},
            status=HTTP_400_BAD_REQUEST,
        )

    resp = self.handle_webhook()
    if resp is None:
        log.info("Unhandled webhook event")
        resp = {"detail": "Unhandled webhook event"}

    # The response can be a DRF Response with the status code already set.
    # In that case, we just return it as is.
    if isinstance(resp, Response):
        return resp

    return Response(resp)
Set up webhook post view with request and project objects.
post
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def finalize_response(self, req, *args, **kwargs):
    """If the project was set on POST, store an HTTP exchange."""
    resp = super().finalize_response(req, *args, **kwargs)
    if hasattr(self, "project") and self.project:
        HttpExchange.objects.from_exchange(
            req,
            resp,
            related_object=self.get_integration(),
            payload=self.data,
        )
    return resp
If the project was set on POST, store an HTTP exchange.
finalize_response
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def get_data(self):
    """
    Normalize posted data.

    This can be overridden to support multiple content types.
    """
    return self.request.data
Normalize posted data. This can be overridden to support multiple content types.
get_data
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def handle_webhook(self):
    """Handle webhook payload."""
    raise NotImplementedError
Handle webhook payload.
handle_webhook
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def get_external_version_data(self):
    """Get External Version data from payload."""
    raise NotImplementedError
Get External Version data from payload.
get_external_version_data
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def is_payload_valid(self):
    """Validates the webhook's payload using the integration's secret."""
    return False
Validates the webhook's payload using the integration's secret.
is_payload_valid
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def get_digest(secret, msg):
    """Get an HMAC digest of `msg` using `secret`."""
    digest = hmac.new(
        secret.encode(),
        msg=msg.encode(),
        digestmod=hashlib.sha256,
    )
    return digest.hexdigest()
Get an HMAC digest of `msg` using `secret`.
get_digest
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
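The digest pairs with the constant-time check done in is_payload_valid further down. A self-contained round trip with a hypothetical secret and payload body:

    import hashlib
    import hmac

    secret = "s3cret"           # hypothetical
    msg = '{"ref": "main"}'     # hypothetical payload body

    digest = hmac.new(secret.encode(), msg=msg.encode(), digestmod=hashlib.sha256).hexdigest()
    header = "sha256=" + digest  # what the provider would send in the signature header
    # Verification must use a constant-time comparison:
    assert hmac.compare_digest(("sha256=" + digest).encode(), header.encode())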
def get_integration(self):
    """
    Get or create an inbound webhook to track webhook requests.

    Most providers don't pass the webhook ID in the payload either, so we
    default to just finding *any* integration from the provider. This is
    not ideal, but the :py:class:`WebhookView` view solves this by
    performing a lookup on the integration instead of guessing.
    """
    # `integration` can be passed in as an argument to `as_view`, as it is
    # in `WebhookView`
    if self.integration is not None:
        return self.integration
    self.integration = get_object_or_404(
        Integration,
        project=self.project,
        integration_type=self.integration_type,
    )
    return self.integration
Get or create an inbound webhook to track webhook requests. Most providers don't pass the webhook ID in the payload either, so we default to just finding *any* integration from the provider. This is not ideal, but the :py:class:`WebhookView` view solves this by performing a lookup on the integration instead of guessing.
get_integration
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def get_response_push(self, project, branches):
    """
    Build branches on push events and return API response.

    Return a JSON response with the following::

        {
            "build_triggered": true,
            "project": "project_name",
            "versions": [...]
        }

    :param project: Project instance
    :type project: Project
    :param branches: List of branch/tag names to build
    :type branches: list(str)
    """
    to_build, not_building = build_branches(project, branches)
    if not_building:
        log.info(
            "Skipping project branches.",
            branches=branches,
        )
    triggered = bool(to_build)
    return {
        "build_triggered": triggered,
        "project": project.slug,
        "versions": list(to_build),
    }
Build branches on push events and return API response. Return a JSON response with the following:: { "build_triggered": true, "project": "project_name", "versions": [...] } :param project: Project instance :type project: Project :param branches: List of branch/tag names to build :type branches: list(str)
get_response_push
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def sync_versions_response(self, project, sync=True):
    """
    Trigger a sync and return a response indicating if the build was triggered or not.

    If `sync` is False, the sync isn't triggered and a response indicating
    so is returned.
    """
    version = None
    if sync:
        version = trigger_sync_versions(project)
    return {
        "build_triggered": False,
        "project": project.slug,
        "versions": [version] if version else [],
        "versions_synced": version is not None,
    }
Trigger a sync and return a response indicating if the build was triggered or not. If `sync` is False, the sync isn't triggered and a response indicating so is returned.
sync_versions_response
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def get_external_version_response(self, project):
    """
    Trigger builds for External versions on pull/merge request events and return API response.

    Return a JSON response with the following::

        {
            "build_triggered": true,
            "project": "project_name",
            "versions": [verbose_name]
        }

    :param project: Project instance
    :type project: readthedocs.projects.models.Project
    """
    version_data = self.get_external_version_data()
    # create or get external version object using `verbose_name`.
    external_version = get_or_create_external_version(
        project=project,
        version_data=version_data,
    )
    # returns external version verbose_name (pull/merge request number)
    to_build = build_external_version(
        project=project,
        version=external_version,
    )

    return {
        "build_triggered": bool(to_build),
        "project": project.slug,
        "versions": [to_build] if to_build else [],
    }
Trigger builds for External versions on pull/merge request events and return API response. Return a JSON response with the following:: { "build_triggered": true, "project": "project_name", "versions": [verbose_name] } :param project: Project instance :type project: readthedocs.projects.models.Project
get_external_version_response
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def get_closed_external_version_response(self, project):
    """
    Close the external version on merge/close events and return the API response.

    Return a JSON response with the following::

        {
            "closed": true,
            "project": "project_name",
            "versions": [verbose_name]
        }

    :param project: Project instance
    :type project: Project
    """
    version_data = self.get_external_version_data()
    version_closed = close_external_version(
        project=project,
        version_data=version_data,
    )
    return {
        "closed": bool(version_closed),
        "project": project.slug,
        "versions": [version_closed] if version_closed else [],
    }
Close the external version on merge/close events and return the API response. Return a JSON response with the following:: { "closed": true, "project": "project_name", "versions": [verbose_name] } :param project: Project instance :type project: Project
get_closed_external_version_response
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def update_default_branch(self, default_branch):
    """
    Update the `Version.identifier` for `latest` with the VCS's `default_branch`.

    The VCS's `default_branch` is the branch cloned when there is no
    specific branch specified (e.g. `git clone <URL>`).

    Some VCS providers (GitHub and GitLab) send the `default_branch` via
    incoming webhooks. We use that data to update our database and keep it
    in sync.

    This solves the problem about "changing the default branch in GitHub"
    and also importing repositories with a different `default_branch` than
    `main` manually:
    https://github.com/readthedocs/readthedocs.org/issues/9367

    In case the user already selected a `default-branch` from the
    "Advanced settings", it does not override it.

    This action can be performed only if the integration has a secret;
    requests from anonymous users are ignored.
    """
    if self.get_integration().secret and not self.project.default_branch:
        # Always check for the machine attribute, since latest can be user created.
        # RTD doesn't manage those.
        self.project.versions.filter(slug=LATEST, machine=True).update(
            identifier=default_branch
        )
Update the `Version.identifier` for `latest` with the VCS's `default_branch`. The VCS's `default_branch` is the branch cloned when there is no specific branch specified (e.g. `git clone <URL>`). Some VCS providers (GitHub and GitLab) send the `default_branch` via incoming webhooks. We use that data to update our database and keep it in sync. This solves the problem about "changing the default branch in GitHub" and also importing repositories with a different `default_branch` than `main` manually: https://github.com/readthedocs/readthedocs.org/issues/9367 In case the user already selected a `default-branch` from the "Advanced settings", it does not override it. This action can be performed only if the integration has a secret; requests from anonymous users are ignored.
update_default_branch
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def get_external_version_data(self):
        """Get commit SHA and pull request number from payload."""
        try:
            data = ExternalVersionData(
                id=str(self.data["number"]),
                commit=self.data["pull_request"]["head"]["sha"],
                source_branch=self.data["pull_request"]["head"]["ref"],
                base_branch=self.data["pull_request"]["base"]["ref"],
            )
            return data
        except KeyError as e:
            key = e.args[0]
            raise ParseError(f"Invalid payload. {key} is required.") from e

Get commit SHA and pull request number from payload.
get_external_version_data
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def is_payload_valid(self):
        """
        GitHub uses an HMAC hexdigest to sign the payload.

        It is sent in the request's header.

        See https://developer.github.com/webhooks/securing/.
        """
        signature = self.request.headers.get(GITHUB_SIGNATURE_HEADER)
        if not signature:
            return False
        secret = self.get_integration().secret
        msg = self.request.body.decode()
        digest = WebhookMixin.get_digest(secret, msg)
        result = hmac.compare_digest(
            b"sha256=" + digest.encode(),
            signature.encode(),
        )
        return result

GitHub uses an HMAC hexdigest to sign the payload.

It is sent in the request's header.

See https://developer.github.com/webhooks/securing/.
is_payload_valid
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
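For reference, the expected signature can be reproduced with the standard library alone. This is a minimal sketch, assuming `WebhookMixin.get_digest` is a plain HMAC-SHA256 hexdigest of the raw body keyed by the integration secret (that helper is not shown in this record):

import hashlib
import hmac

def sign_github_payload(secret: str, body: bytes) -> str:
    # GitHub sends "sha256=<hexdigest>" in its signature header.
    digest = hmac.new(secret.encode(), body, hashlib.sha256).hexdigest()
    return f"sha256={digest}"

# A receiver compares this value against the header with
# hmac.compare_digest() to avoid timing attacks.
print(sign_github_payload("s3cret", b'{"ref": "refs/heads/main"}'))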
def handle_webhook(self):
        """
        Handle GitHub webhook events.

        It checks for all the events we support currently:

        - PUSH: Triggered on a push to a repository branch. Branch pushes and
          repository tag pushes also trigger webhook push events.

          .. note::

            ``created`` and ``deleted`` indicate if the push was a branch/tag
            created or deleted. This is required for old webhooks created at
            Read the Docs that do not register the ``create`` and ``delete``
            events.

            Newer webhooks created on Read the Docs will trigger a
            PUSH+created=True **and** a CREATE event. We need to handle this
            in a specific way to not trigger the sync twice.

        - CREATE: Represents a created branch or tag.

        - DELETE: Represents a deleted branch or tag.

        - PULL_REQUEST: Triggered when a pull request is assigned, unassigned,
          labeled, unlabeled, opened, edited, closed, reopened, synchronize,
          ready_for_review, locked, unlocked or when a pull request review is
          requested or removed (``action`` will contain this data)

        See https://developer.github.com/v3/activity/events/types/
        """
        # Get event and trigger other webhook events
        action = self.data.get("action", None)
        created = self.data.get("created", False)
        deleted = self.data.get("deleted", False)
        event = self.request.headers.get(GITHUB_EVENT_HEADER, GITHUB_PUSH)
        log.bind(webhook_event=event)
        webhook_github.send(
            Project,
            project=self.project,
            data=self.data,
            event=event,
        )

        # Always update `latest` branch to point to the default branch in the repository
        # even if the event is not going to be handled. This helps us to keep our db in sync.
        default_branch = self.data.get("repository", {}).get("default_branch", None)
        if default_branch:
            self.update_default_branch(default_branch)

        if event == GITHUB_PING:
            return {"detail": "Webhook configured correctly"}

        # Sync versions when a branch/tag was created/deleted
        if event in (GITHUB_CREATE, GITHUB_DELETE):
            log.debug("Triggered sync_versions.")
            return self.sync_versions_response(self.project)

        integration = self.get_integration()

        # Handle pull request events.
        if self.project.external_builds_enabled and event == GITHUB_PULL_REQUEST:
            if action in [
                GITHUB_PULL_REQUEST_OPENED,
                GITHUB_PULL_REQUEST_REOPENED,
                GITHUB_PULL_REQUEST_SYNC,
            ]:
                # Trigger a build when PR is opened/reopened/sync
                return self.get_external_version_response(self.project)

            if action == GITHUB_PULL_REQUEST_CLOSED:
                # Delete external version when PR is closed
                return self.get_closed_external_version_response(self.project)

        # Sync versions when push event is created/deleted action
        if all(
            [
                event == GITHUB_PUSH,
                (created or deleted),
            ]
        ):
            events = (
                integration.provider_data.get("events", [])
                if integration.provider_data
                else []
            )  # noqa
            if any(
                [
                    GITHUB_CREATE in events,
                    GITHUB_DELETE in events,
                ]
            ):
                # GitHub will send PUSH **and** CREATE/DELETE events on a creation/deletion in newer
                # webhooks. If we receive a PUSH event we need to check if the webhook doesn't
                # already have the CREATE/DELETE events. So we don't trigger the sync twice.
                return self.sync_versions_response(self.project, sync=False)

            log.debug(
                "Triggered sync_versions.",
                integration_events=events,
            )
            return self.sync_versions_response(self.project)

        # Trigger a build for all branches in the push
        if event == GITHUB_PUSH:
            try:
                branch = self._normalize_ref(self.data["ref"])
                return self.get_response_push(self.project, [branch])
            except KeyError as exc:
                raise ParseError('Parameter "ref" is required') from exc

        return None

Handle GitHub webhook events.

It checks for all the events we support currently:

- PUSH: Triggered on a push to a repository branch. Branch pushes and repository
  tag pushes also trigger webhook push events.

  .. note::

    ``created`` and ``deleted`` indicate if the push was a branch/tag created or
    deleted. This is required for old webhooks created at Read the Docs that do
    not register the ``create`` and ``delete`` events.

    Newer webhooks created on Read the Docs will trigger a PUSH+created=True
    **and** a CREATE event. We need to handle this in a specific way to not
    trigger the sync twice.

- CREATE: Represents a created branch or tag.

- DELETE: Represents a deleted branch or tag.

- PULL_REQUEST: Triggered when a pull request is assigned, unassigned, labeled,
  unlabeled, opened, edited, closed, reopened, synchronize, ready_for_review,
  locked, unlocked or when a pull request review is requested or removed
  (``action`` will contain this data)

See https://developer.github.com/v3/activity/events/types/
handle_webhook
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
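To illustrate the fields this handler reads, here is a minimal, hypothetical GitHub push payload (the keys match GitHub's documented push event; the values are made up). For an ordinary branch push like this one, the handler normalizes `ref` and triggers a build for that branch:

# Hypothetical minimal payload for a push to an existing branch
# (created/deleted are False, so no version sync is triggered).
payload = {
    "ref": "refs/heads/main",
    "created": False,
    "deleted": False,
    "repository": {"default_branch": "main"},
}
# handle_webhook() would call _normalize_ref("refs/heads/main") -> "main"
# and then get_response_push(project, ["main"]).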
def _normalize_ref(self, ref):
        """Remove `refs/(heads|tags)/` from the reference to match a Version on the db."""
        pattern = re.compile(r"^refs/(heads|tags)/")
        return pattern.sub("", ref)

Remove `refs/(heads|tags)/` from the reference to match a Version on the db.
_normalize_ref
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
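A quick standalone sketch of the expected behavior (ref names are hypothetical):

import re

pattern = re.compile(r"^refs/(heads|tags)/")
assert pattern.sub("", "refs/heads/main") == "main"
assert pattern.sub("", "refs/tags/v1.0") == "v1.0"
# Already-normalized refs pass through unchanged.
assert pattern.sub("", "main") == "main"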
def is_payload_valid(self):
        """
        GitLab only sends back the token from the webhook.

        It is sent in the request's header.

        See https://docs.gitlab.com/ee/user/project/integrations/webhooks.html#secret-token.
        """
        token = self.request.headers.get(GITLAB_TOKEN_HEADER, "")
        if not token:
            return False
        secret = self.get_integration().secret
        return constant_time_compare(secret, token)

GitLab only sends back the token from the webhook.

It is sent in the request's header.

See https://docs.gitlab.com/ee/user/project/integrations/webhooks.html#secret-token.
is_payload_valid
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def get_external_version_data(self):
        """Get commit SHA and merge request number from payload."""
        try:
            data = ExternalVersionData(
                id=str(self.data["object_attributes"]["iid"]),
                commit=self.data["object_attributes"]["last_commit"]["id"],
                source_branch=self.data["object_attributes"]["source_branch"],
                base_branch=self.data["object_attributes"]["target_branch"],
            )
            return data
        except KeyError as e:
            key = e.args[0]
            raise ParseError(f"Invalid payload. {key} is required.") from e

Get commit SHA and merge request number from payload.
get_external_version_data
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def handle_webhook(self):
        """
        Handle GitLab events for push and tag_push.

        GitLab doesn't have a separate event for creation/deletion;
        instead, it sets the before/after field to
        0000000000000000000000000000000000000000 ('0' * 40).
        """
        event = self.request.data.get("object_kind", GITLAB_PUSH)
        action = self.data.get("object_attributes", {}).get("action", None)
        log.bind(webhook_event=event)
        webhook_gitlab.send(
            Project,
            project=self.project,
            data=self.request.data,
            event=event,
        )

        # Always update `latest` branch to point to the default branch in the repository
        # even if the event is not going to be handled. This helps us to keep our db in sync.
        default_branch = self.data.get("project", {}).get("default_branch", None)
        if default_branch:
            self.update_default_branch(default_branch)

        # Handle push events and trigger builds
        if event in (GITLAB_PUSH, GITLAB_TAG_PUSH):
            data = self.request.data
            before = data.get("before")
            after = data.get("after")
            # Tag/branch created/deleted
            if GITLAB_NULL_HASH in (before, after):
                log.debug(
                    "Triggered sync_versions.",
                    before=before,
                    after=after,
                )
                return self.sync_versions_response(self.project)
            # Normal push to master
            try:
                branch = self._normalize_ref(data["ref"])
                return self.get_response_push(self.project, [branch])
            except KeyError as exc:
                raise ParseError('Parameter "ref" is required') from exc

        if self.project.external_builds_enabled and event == GITLAB_MERGE_REQUEST:
            if action in [
                GITLAB_MERGE_REQUEST_OPEN,
                GITLAB_MERGE_REQUEST_REOPEN,
                GITLAB_MERGE_REQUEST_UPDATE,
            ]:
                # Handle open, update, reopen merge_request event.
                return self.get_external_version_response(self.project)

            if action in [GITLAB_MERGE_REQUEST_CLOSE, GITLAB_MERGE_REQUEST_MERGE]:
                # Handle merge and close merge_request event.
                return self.get_closed_external_version_response(self.project)

        return None

Handle GitLab events for push and tag_push.

GitLab doesn't have a separate event for creation/deletion; instead, it sets the before/after field to 0000000000000000000000000000000000000000 ('0' * 40).
handle_webhook
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
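A sketch of the branch-creation case this handler detects. The payload shape follows GitLab's documented push event; the values are hypothetical:

GITLAB_NULL_HASH = "0" * 40

payload = {
    "object_kind": "push",
    "ref": "refs/heads/new-feature",
    "before": GITLAB_NULL_HASH,   # zero hash: the branch did not exist before
    "after": "a1b2c3d",           # first commit on the new branch (hypothetical)
}
# Because `before` is the null hash, handle_webhook() syncs versions
# instead of only building the pushed branch.
assert GITLAB_NULL_HASH in (payload["before"], payload["after"])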
def handle_webhook(self):
        """
        Handle Bitbucket events for push.

        Bitbucket doesn't have a separate event for creation/deletion;
        instead, it sets the new attribute (null if it is a deletion)
        and the old attribute (null if it is a creation).
        """
        event = self.request.headers.get(BITBUCKET_EVENT_HEADER, BITBUCKET_PUSH)
        log.bind(webhook_event=event)
        webhook_bitbucket.send(
            Project,
            project=self.project,
            data=self.request.data,
            event=event,
        )

        # NOTE: we can't call `self.update_default_branch` here because
        # Bitbucket does not tell us what the `default_branch` is for a
        # repository in these incoming webhooks.

        if event == BITBUCKET_PUSH:
            try:
                data = self.request.data
                changes = data["push"]["changes"]
                branches = []
                for change in changes:
                    old = change["old"]
                    new = change["new"]
                    # Normal push to master
                    if old is not None and new is not None:
                        branches.append(new["name"])
                # Bitbucket returns an array of changes rather than
                # one webhook per change. If we have at least one normal push
                # we don't trigger the sync versions, because that
                # will be triggered with the normal push.
                if branches:
                    return self.get_response_push(
                        self.project,
                        branches,
                    )
                log.debug("Triggered sync_versions.")
                return self.sync_versions_response(self.project)
            except KeyError as exc:
                raise ParseError("Invalid request") from exc

        return None

Handle Bitbucket events for push.

Bitbucket doesn't have a separate event for creation/deletion; instead, it sets the new attribute (null if it is a deletion) and the old attribute (null if it is a creation).
handle_webhook
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def is_payload_valid(self):
        """
        Bitbucket uses an HMAC hexdigest to sign the payload.

        It is sent in the request's header.

        See https://support.atlassian.com/bitbucket-cloud/docs/manage-webhooks/#Secure-webhooks.
        """
        signature = self.request.headers.get(BITBUCKET_SIGNATURE_HEADER)
        if not signature:
            return False
        secret = self.get_integration().secret
        msg = self.request.body.decode()
        digest = WebhookMixin.get_digest(secret, msg)
        result = hmac.compare_digest(
            b"sha256=" + digest.encode(),
            signature.encode(),
        )
        return result

Bitbucket uses an HMAC hexdigest to sign the payload.

It is sent in the request's header.

See https://support.atlassian.com/bitbucket-cloud/docs/manage-webhooks/#Secure-webhooks.
is_payload_valid
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def get_project(self, **kwargs):
        """
        Get authenticated user projects, or token authed projects.

        Allow a user either to be authenticated to receive a project, or
        require the integration token to be specified as a POST argument.
        """
        # If the user is not an admin of the project, fall back to token auth
        if self.request.user.is_authenticated:
            try:
                return Project.objects.for_admin_user(
                    self.request.user,
                ).get(**kwargs)
            except Project.DoesNotExist:
                pass
        # Recheck project and integration relationship during token auth check
        token = self.request.data.get("token")
        if token:
            integration = self.get_integration()
            obj = Project.objects.get(**kwargs)
            is_valid = integration.project == obj and constant_time_compare(
                token,
                getattr(integration, "token", ""),
            )
            if is_valid:
                return obj

        raise Project.DoesNotExist()

Get authenticated user projects, or token authed projects.

Allow a user either to be authenticated to receive a project, or require the integration token to be specified as a POST argument.
get_project
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def is_payload_valid(self):
        """
        Generic webhooks don't have payload validation.

        We use basic auth or token auth to validate that the user has access
        to the project and integration (get_project() method).
        """
        return True

Generic webhooks don't have payload validation.

We use basic auth or token auth to validate that the user has access to the project and integration (get_project() method).
is_payload_valid
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
def post(self, request, project_slug, integration_pk):
        """Set up webhook post view with request and project objects."""
        # WARNING: this is a hack to allow us access to `request.body` later.
        # Due to a limitation of DRF, we can't access `request.body`
        # after accessing `request.data`.
        # By accessing `request.body` we are able to access `request.body` and
        # `request.data` later without any problem (mostly black magic).
        # See #4940 for more background.
        request.body  # noqa
        integration = get_object_or_404(
            Integration,
            project__slug=project_slug,
            pk=integration_pk,
        )
        view_cls = self.VIEW_MAP[integration.integration_type]
        view = view_cls.as_view(integration=integration)
        # DRF uses ``rest_framework.request.Request`` and Django expects
        # ``django.http.HttpRequest``
        # https://www.django-rest-framework.org/api-guide/requests/
        # https://github.com/encode/django-rest-framework/pull/5771#issuecomment-362815342
        return view(request._request, project_slug)

Set up webhook post view with request and project objects.
post
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/integrations.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/integrations.py
MIT
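The ordering constraint that hack works around is "read the raw body before anything consumes the stream". A toy stand-in class (illustration only, not Django's implementation) shows the caching pattern:

class StreamRequest:
    """Toy stand-in for a lazily-read request body (illustration only)."""

    def __init__(self, stream: bytes):
        self._stream = stream  # pretend this is a one-shot socket read
        self._body = None

    @property
    def body(self) -> bytes:
        if self._body is None:
            if self._stream is None:
                # Django raises RawPostDataException in the analogous case.
                raise RuntimeError("body unavailable: stream already consumed")
            self._body = self._stream  # cache the raw bytes
        return self._body

    def data(self) -> bytes:
        # Parsing consumes the stream unless the body was cached first.
        payload = self._body if self._body is not None else self._stream
        self._stream = None
        return payload

req = StreamRequest(b'{"ok": true}')
req.body    # cache first, like the hack above
req.data()  # still fine
req.body    # still fine: served from the cache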
def job_status(request, task_id):
    """Retrieve Celery task function state from frontend."""
    # HACK: always poll up to N times and after that return the sync has
    # finished. This is a way to avoid re-enabling Celery result backend for now.
    # TODO: remove this API and RemoteRepo sync UI when we have better auto syncing
    poll_n = cache.get(task_id, 0)
    poll_n += 1
    cache.set(task_id, poll_n, 5 * 60)
    finished = poll_n == 5

    data = {
        "name": "sync_remote_repositories",
        "data": {},
        "started": True,
        "finished": finished,
        "success": finished,
    }
    return Response(data)

Retrieve Celery task function state from frontend.
job_status
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/task_views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/task_views.py
MIT
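Given the five-poll cap above, a client sees `finished` flip to true on its fifth request. A hypothetical polling loop; the endpoint URL is an assumption for illustration, not taken from the source:

import time

import requests

def wait_for_sync(status_url: str, interval: float = 2.0) -> bool:
    # Poll the job-status endpoint until it reports completion.
    # With the server-side counter above, this returns after ~5 polls.
    while True:
        resp = requests.get(status_url)
        resp.raise_for_status()
        payload = resp.json()
        if payload["finished"]:
            return payload["success"]
        time.sleep(interval)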
def sync_remote_repositories(request):
    """Trigger a re-sync of remote repositories for the user."""
    result = tasks.sync_remote_repositories.delay(
        user_id=request.user.id,
    )
    task_id = result.task_id
    return Response(
        {
            "task_id": task_id,
            "url": reverse("api_job_status", kwargs={"task_id": task_id}),
        }
    )

Trigger a re-sync of remote repositories for the user.
sync_remote_repositories
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/task_views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/task_views.py
MIT
def docurl(request):
    """
    Get the url that a slug resolves to.

    Example::

        GET https://readthedocs.org/api/v2/docurl/?
            project=requests&
            version=latest&
            doc=index&
            path=index.html
    """
    project = request.GET.get("project")
    version = request.GET.get("version", LATEST)
    doc = request.GET.get("doc", "index")
    path = request.GET.get("path", "")
    if project is None:
        return Response(
            {"error": "Need project and doc"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    project = get_object_or_404(Project, slug=project)
    version = get_object_or_404(
        Version.objects.public(request.user, project=project, only_active=False),
        slug=version,
    )
    return Response(
        {
            "url": make_document_url(
                project=project,
                version=version.slug,
                page=doc,
                path=path,
            ),
        }
    )

Get the url that a slug resolves to.

Example::

    GET https://readthedocs.org/api/v2/docurl/?
        project=requests&
        version=latest&
        doc=index&
        path=index.html
docurl
python
readthedocs/readthedocs.org
readthedocs/api/v2/views/core_views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v2/views/core_views.py
MIT
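The same call from the client side with `requests`, mirroring the example in the docstring (the project/version slugs are the docstring's example values):

import requests

resp = requests.get(
    "https://readthedocs.org/api/v2/docurl/",
    params={
        "project": "requests",  # project slug
        "version": "latest",
        "doc": "index",
        "path": "index.html",
    },
)
resp.raise_for_status()
print(resp.json()["url"])  # the resolved documentation URL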
def get_success(self, obj):
        """
        Return ``None`` if the build is not finished.

        This is needed because ``default=True`` in the model field.
        """
        if obj.finished:
            return obj.success

        return None

Return ``None`` if the build is not finished.

This is needed because ``default=True`` in the model field.
get_success
python
readthedocs/readthedocs.org
readthedocs/api/v3/serializers.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/serializers.py
MIT
def _validate_remote_repository(self, data):
        """
        Validate connection between `Project` and `RemoteRepository`.

        We don't do anything in community, but we do ensure this relationship
        is possible before creating the `Project` on commercial when the
        organization has VCS SSO enabled.

        If we cannot ensure the relationship here, this method should raise a
        `ValidationError`.
        """

Validate connection between `Project` and `RemoteRepository`.

We don't do anything in community, but we do ensure this relationship is possible before creating the `Project` on commercial when the organization has VCS SSO enabled.

If we cannot ensure the relationship here, this method should raise a `ValidationError`.
_validate_remote_repository
python
readthedocs/readthedocs.org
readthedocs/api/v3/serializers.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/serializers.py
MIT
def get_serializer_class(self):
        """
        Return the correct serializer depending on the action.

        For GET it returns a serializer with many fields and on PUT/PATCH/POST,
        it returns a serializer to validate just a few fields.
        """
        if self.action in ("list", "retrieve"):
            return ProjectSerializer

        if self.action == "create":
            return ProjectCreateSerializer

        if self.action in ("update", "partial_update"):
            return ProjectUpdateSerializer

        # Default serializer so that sync_versions works with the Browsable API
        return ProjectSerializer

Return the correct serializer depending on the action.

For GET it returns a serializer with many fields and on PUT/PATCH/POST, it returns a serializer to validate just a few fields.
get_serializer_class
python
readthedocs/readthedocs.org
readthedocs/api/v3/views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/views.py
MIT
def create(self, request, *args, **kwargs):
        """
        Import Project.

        Override to use a different serializer in the response,
        since it's a different format than the one used for the request.
        """
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        self.perform_create(serializer)
        headers = self.get_success_headers(serializer.data)

        # Use a serializer that fully renders a Project,
        # instead of the one used for the request.
        serializer = ProjectSerializer(instance=serializer.instance)

        return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)

Import Project.

Override to use a different serializer in the response, since it's a different format than the one used for the request.
create
python
readthedocs/readthedocs.org
readthedocs/api/v3/views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/views.py
MIT
def perform_create(self, serializer):
        """
        Import Project.

        Trigger our internal mechanism to import a project after it's
        saved in the database.
        """
        project = super().perform_create(serializer)
        self.finish_import_project(self.request, project)

Import Project.

Trigger our internal mechanism to import a project after it's saved in the database.
perform_create
python
readthedocs/readthedocs.org
readthedocs/api/v3/views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/views.py
MIT
def superproject(self, request, project_slug):
        """Return the superproject of a ``Project``."""
        superproject = self._get_superproject()
        if not superproject:
            return Response(status=status.HTTP_404_NOT_FOUND)

        data = ProjectSerializer(superproject).data
        return Response(data)

Return the superproject of a ``Project``.
superproject
python
readthedocs/readthedocs.org
readthedocs/api/v3/views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/views.py
MIT
def _get_superproject(self):
        """Get the superproject of the project, taking into consideration the user permissions."""
        project = self.get_object()
        return self.get_queryset().filter(subprojects__child=project).first()

Get the superproject of the project, taking into consideration the user permissions.
_get_superproject
python
readthedocs/readthedocs.org
readthedocs/api/v3/views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/views.py
MIT
def sync_versions(self, request, project_slug):
        """
        Kick off a task to sync versions for a project.

        POST to this endpoint to trigger a task that syncs versions for the
        project. This is used by a button in the frontend, but it can also be
        used to trigger a sync from the API.
        """
        project = self.get_object()
        triggered = trigger_sync_versions(project)
        data = {}
        if triggered:
            data.update({"triggered": True})
            code = status.HTTP_202_ACCEPTED
        else:
            data.update({"triggered": False})
            code = status.HTTP_400_BAD_REQUEST
        return Response(data=data, status=code)

Kick off a task to sync versions for a project.

POST to this endpoint to trigger a task that syncs versions for the project. This is used by a button in the frontend, but it can also be used to trigger a sync from the API.
sync_versions
python
readthedocs/readthedocs.org
readthedocs/api/v3/views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/views.py
MIT
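A hedged sketch of triggering the sync from the API with `requests`. The endpoint path and token header follow API v3 conventions but are assumptions for illustration, not taken from this record:

import requests

resp = requests.post(
    "https://readthedocs.org/api/v3/projects/my-project/sync-versions/",  # assumed path
    headers={"Authorization": "Token <api-token>"},  # hypothetical token
)
# 202 -> {"triggered": true}; 400 -> {"triggered": false}
print(resp.status_code, resp.json())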
def get_serializer_class(self):
        """
        Return the correct serializer depending on the action.

        For GET it returns a serializer with many fields and on POST,
        it returns a serializer to validate just a few fields.
        """
        if self.action == "create":
            return SubprojectCreateSerializer

        if self.action == "destroy":
            return SubprojectDestroySerializer

        return SubprojectSerializer

Return the correct serializer depending on the action.

For GET it returns a serializer with many fields and on POST, it returns a serializer to validate just a few fields.
get_serializer_class
python
readthedocs/readthedocs.org
readthedocs/api/v3/views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/views.py
MIT
def create(self, request, *args, **kwargs):
        """Define a Project as subproject of another Project."""
        parent = self._get_parent_project()
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.save(parent=parent)
        headers = self.get_success_headers(serializer.data)

        # Use a serializer that fully renders the subproject
        serializer = SubprojectSerializer(instance=serializer.instance)

        return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)

Define a Project as subproject of another Project.
create
python
readthedocs/readthedocs.org
readthedocs/api/v3/views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/views.py
MIT
def get_serializer_class(self):
        """
        Return the correct serializer depending on the action.

        For GET it returns a serializer with many fields and on PUT/PATCH/POST,
        it returns a serializer to validate just a few fields.
        """
        if self.action in ("list", "retrieve"):
            return VersionSerializer

        return VersionUpdateSerializer

Return the correct serializer depending on the action.

For GET it returns a serializer with many fields and on PUT/PATCH/POST, it returns a serializer to validate just a few fields.
get_serializer_class
python
readthedocs/readthedocs.org
readthedocs/api/v3/views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/views.py
MIT
def update(self, request, *args, **kwargs):
        """Overridden to call ``post_save`` method on the updated version."""
        # Get the current value before updating.
        version = self.get_object()
        was_active = version.active

        result = super().update(request, *args, **kwargs)

        # Get the updated version.
        version = self.get_object()
        version.post_save(was_active=was_active)
        return result

Overridden to call ``post_save`` method on the updated version.
update
python
readthedocs/readthedocs.org
readthedocs/api/v3/views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/views.py
MIT
def get_queryset(self):
        """Overridden to allow internal versions only."""
        return super().get_queryset().exclude(type=EXTERNAL)

Overridden to allow internal versions only.
get_queryset
python
readthedocs/readthedocs.org
readthedocs/api/v3/views.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/views.py
MIT
def render(self, data, accepted_media_type=None, renderer_context=None):
        """
        Copied from ``rest_framework.renderers.JSONRenderer``.

        Changes:

        - sort_keys=True on json.dumps
        - use str instead of six.text_type

        https://github.com/encode/django-rest-framework/blob/b7523f4/rest_framework/renderers.py#L84
        """
        if data is None:
            return bytes()

        renderer_context = renderer_context or {}
        indent = self.get_indent(accepted_media_type, renderer_context)

        if indent is None:
            separators = SHORT_SEPARATORS if self.compact else LONG_SEPARATORS
        else:
            separators = INDENT_SEPARATORS

        ret = json.dumps(
            data,
            cls=self.encoder_class,
            indent=indent,
            ensure_ascii=self.ensure_ascii,
            allow_nan=not self.strict,
            separators=separators,
            sort_keys=True,
        )

        if isinstance(ret, str):
            # We always fully escape \u2028 and \u2029 to ensure we output JSON
            # that is a strict javascript subset. If bytes were returned
            # by json.dumps() then we don't have these characters in any case.
            # See: http://timelessrepo.com/json-isnt-a-javascript-subset
            ret = ret.replace("\u2028", "\\u2028").replace("\u2029", "\\u2029")
            return bytes(ret.encode("utf-8"))
        return ret

Copied from ``rest_framework.renderers.JSONRenderer``.

Changes:

- sort_keys=True on json.dumps
- use str instead of six.text_type

https://github.com/encode/django-rest-framework/blob/b7523f4/rest_framework/renderers.py#L84
render
python
readthedocs/readthedocs.org
readthedocs/api/v3/renderers.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/renderers.py
MIT
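The escaping step matters because U+2028 and U+2029 are valid inside JSON strings but are line terminators in pre-ES2019 JavaScript. A minimal standalone demonstration of the same replace:

import json

ret = json.dumps({"text": "line\u2028break"}, ensure_ascii=False)
# The raw output embeds the literal LINE SEPARATOR character:
assert "\u2028" in ret
# Escaping keeps the payload a strict JavaScript subset:
safe = ret.replace("\u2028", "\\u2028").replace("\u2029", "\\u2029")
assert "\\u2028" in safe and "\u2028" not in safe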
def _get_parent_build(self):
        """
        Filter the build by the permissions of the current user.

        Build permissions depend not only on the project, but also on the
        version; ``Build.objects.api`` takes all of that into consideration.
        """
        project_slug = self._get_parent_object_lookup(self.PROJECT_LOOKUP_NAMES)
        build_pk = self._get_parent_object_lookup(self.BUILD_LOOKUP_NAMES)
        return get_object_or_404(
            Build.objects.api(user=self.request.user),
            pk=build_pk,
            project__slug=project_slug,
        )

Filter the build by the permissions of the current user.

Build permissions depend not only on the project, but also on the version; ``Build.objects.api`` takes all of that into consideration.
_get_parent_build
python
readthedocs/readthedocs.org
readthedocs/api/v3/mixins.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/mixins.py
MIT
def get_queryset(self):
        """Filter projects or related resources based on the permissions of the current user."""
        return self.model.objects.api(user=self.request.user)

Filter projects or related resources based on the permissions of the current user.
get_queryset
python
readthedocs/readthedocs.org
readthedocs/api/v3/mixins.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/mixins.py
MIT
def has_admin_permission(self, user, organization):
        """Check if user is an owner of the organization."""
        if self.admin_organizations(user).filter(pk=organization.pk).exists():
            return True

        return False

Check if user is an owner of the organization.
has_admin_permission
python
readthedocs/readthedocs.org
readthedocs/api/v3/mixins.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/mixins.py
MIT
def is_admin_member(self, user, organization):
        """
        Check if the user is an owner of the organization,
        or belongs to a team with admin permissions on it.
        """
        if self.has_admin_permission(user, organization):
            return True

        return (
            Project.objects.for_admin_user(user=user)
            .filter(organizations__in=[organization])
            .exists()
        )

Check if the user is an owner of the organization, or belongs to a team with admin permissions on it.
is_admin_member
python
readthedocs/readthedocs.org
readthedocs/api/v3/mixins.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/mixins.py
MIT
def get_queryset(self):
        """Filter organizations or related resources based on the permissions of the current user."""
        return self.model.objects.api(user=self.request.user)

Filter organizations or related resources based on the permissions of the current user.
get_queryset
python
readthedocs/readthedocs.org
readthedocs/api/v3/mixins.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/mixins.py
MIT
def test_users_notifications_list_with_email_like_username(self):
        """Test for #11260."""
        self.me.username = "[email protected]"
        self.me.save()

        url = reverse(
            "users-notifications-list",
            kwargs={
                "parent_lookup_user__username": self.me.username,
            },
        )

        self.client.credentials(HTTP_AUTHORIZATION=f"Token {self.token.key}")
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)

Test for #11260.
test_users_notifications_list_with_email_like_username
python
readthedocs/readthedocs.org
readthedocs/api/v3/tests/test_users.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/tests/test_users.py
MIT
def test_external_version_projects_versions_builds_list_post(self):
        """Build starts using last commit against external version."""
        self.version.type = EXTERNAL
        self.build.commit = "d4e5f6"
        self.version.save()
        self.build.save()

        url = reverse(
            "projects-versions-builds-list",
            kwargs={
                "parent_lookup_project__slug": self.project.slug,
                "parent_lookup_version__slug": self.version.slug,
            },
        )
        self.client.credentials(HTTP_AUTHORIZATION=f"Token {self.token.key}")

        self.assertEqual(self.project.builds.count(), 1)
        response = self.client.post(url)
        self.assertEqual(response.status_code, 202)
        self.assertEqual(self.project.builds.count(), 2)

        response_json = response.json()
        response_json["build"]["created"] = "2019-04-29T14:00:00Z"

        expected = self._get_response_dict("projects-versions-builds-list_POST")
        expected["build"]["commit"] = "d4e5f6"
        expected["version"]["type"] = "external"
        expected["version"]["urls"][
            "documentation"
        ] = "http://project--v1.0.external-builds.readthedocs.io/en/v1.0/"
        expected["version"]["urls"]["vcs"] = "https://github.com/rtfd/project/pull/v1.0"
        self.assertDictEqual(response_json, expected)

Build starts using last commit against external version.
test_external_version_projects_versions_builds_list_post
python
readthedocs/readthedocs.org
readthedocs/api/v3/tests/test_builds.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/tests/test_builds.py
MIT
def _create_new_project(self):
        """Helper to create a project with all the fields set."""
        return fixture.get(
            Project,
            pub_date=self.created,
            modified_date=self.modified,
            description="Project description",
            repo="https://github.com/rtfd/project",
            project_url="http://project.com",
            name="new-project",
            slug="new-project",
            related_projects=[],
            main_language_project=None,
            users=[self.me],
            versions=[],
            external_builds_privacy_level=PUBLIC,
            privacy_level=PUBLIC,
        )

Helper to create a project with all the fields set.
_create_new_project
python
readthedocs/readthedocs.org
readthedocs/api/v3/tests/mixins.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/tests/mixins.py
MIT
def _create_subproject(self):
        """Helper to create a sub-project with all the fields set."""
        self.subproject = fixture.get(
            Project,
            pub_date=self.created,
            modified_date=self.modified,
            description="SubProject description",
            repo="https://github.com/rtfd/subproject",
            project_url="http://subproject.com",
            name="subproject",
            slug="subproject",
            related_projects=[],
            main_language_project=None,
            users=[self.me],
            versions=[],
            external_builds_privacy_level=PUBLIC,
            privacy_level=PUBLIC,
        )
        self.project_relationship = self.project.add_subproject(self.subproject)

Helper to create a sub-project with all the fields set.
_create_subproject
python
readthedocs/readthedocs.org
readthedocs/api/v3/tests/mixins.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/tests/mixins.py
MIT
def assertDictEqual(self, d1, d2):
        """
        Show the differences between the dicts in a human-readable way.

        It's just a helper for debugging API responses.
        """
        message = ""
        try:
            import datadiff

            message = datadiff.diff(d1, d2)
        except ImportError:
            pass

        return super().assertDictEqual(d1, d2, message)

Show the differences between the dicts in a human-readable way.

It's just a helper for debugging API responses.
assertDictEqual
python
readthedocs/readthedocs.org
readthedocs/api/v3/tests/mixins.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/api/v3/tests/mixins.py
MIT
def check_domains_limit(project, error_class=ValidationError):
    """Check if the project has reached the limit on the number of domains."""
    feature = get_feature(project, TYPE_CNAME)
    if feature.unlimited:
        return

    if project.domains.count() >= feature.value:
        msg = _(
            f"This project has reached the limit of {feature.value} domains."
            " Consider removing unused domains."
        )
        if settings.ALLOW_PRIVATE_REPOS:
            msg = _(
                f"Your organization has reached the limit of {feature.value} domains."
                " Consider removing unused domains or upgrading your plan."
            )
        raise error_class(msg)

Check if the project has reached the limit on the number of domains.
check_domains_limit
python
readthedocs/readthedocs.org
readthedocs/domains/validators.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/domains/validators.py
MIT
def expired(self, when=None):
        """
        Return domains that have their validation process expired.

        :param when: If given, return domains that expired on this date only.
        """
        queryset = self.exclude(Q(ssl_status=SSL_STATUS_VALID) | Q(skip_validation=True))
        if when:
            start_date = when - timedelta(days=settings.RTD_CUSTOM_DOMAINS_VALIDATION_PERIOD)
            queryset = queryset.filter(validation_process_start__date=start_date)
        else:
            max_date = timezone.now() - timedelta(
                days=settings.RTD_CUSTOM_DOMAINS_VALIDATION_PERIOD
            )
            queryset = queryset.filter(validation_process_start__date__lte=max_date)
        return queryset

Return domains that have their validation process expired.

:param when: If given, return domains that expired on this date only.
expired
python
readthedocs/readthedocs.org
readthedocs/domains/querysets.py
https://github.com/readthedocs/readthedocs.org/blob/master/readthedocs/domains/querysets.py
MIT
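The cutoff arithmetic in `expired()` is easy to check in isolation. A minimal sketch, assuming a 30-day validation period (the real value lives in `settings.RTD_CUSTOM_DOMAINS_VALIDATION_PERIOD`):

from datetime import date, timedelta

VALIDATION_PERIOD = 30  # assumed; configured via settings in the real code

# "Expired exactly on `when`": validation started `when - period` days ago.
when = date(2024, 5, 1)
assert when - timedelta(days=VALIDATION_PERIOD) == date(2024, 4, 1)

# "Expired by now": validation started on or before `today - period`.
today = date(2024, 5, 1)
cutoff = today - timedelta(days=VALIDATION_PERIOD)
assert date(2024, 3, 15) <= cutoff  # a domain started then would be included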