logging.exception(e)
if "The workflow run containing this job is already running" in str(e):
pass
else:
raise e
def comment_failure(self, msg: str, exceptions: Union[Exception, List[Exception]]):
if not isinstance(exceptions, list):
exceptions = [exceptions]
logging.info(f"Failed, commenting {exceptions}")
for item in exceptions:
try:
raise item
except Exception:
item.exception_msg = traceback.format_exc()
comment = f"{msg} in {args.run_url}\n\n"
for exception in exceptions:
comment += f"<details>\n\n```\n{exception.exception_msg}\n```\n\n"
if hasattr(exception, "read"):
comment += f"with response\n\n```\n{exception.read().decode()}\n```\n\n"
comment += "</details>"
pr.comment(comment)
pr.has_error = True
return exception
def check_author(pr, triggering_comment, args):
comment_author = triggering_comment["user"]["login"]
if pr.author() == comment_author:
logging.info("Comment user is PR author, continuing")
return True
return False
def search_users(name, triggering_comment, testing_json, search_fn):
logging.info(f"Checking {name}")
comment_author = triggering_comment["user"]["login"]
if testing_json:
matching_users = json.loads(testing_json)
else:
matching_users = search_fn(comment_author)
logging.info(f"Found {name}: {matching_users}")
user_names = {user["login"] for user in matching_users}
return len(matching_users) > 0 and comment_author in user_names
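# Illustrative sketch (added for clarity, not part of the original script): with the
# --testing-*-json escape hatch, search_users() never calls GitHub; it just checks the
# comment author against the supplied list. The names below are made up.
def _example_search_users():
    comment = {"user": {"login": "alice"}}
    users_json = '[{"login": "alice"}, {"login": "bob"}]'
    return search_users("collaborators", comment, testing_json=users_json, search_fn=None)  # -> True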
def check_collaborator(pr, triggering_comment, args):
return search_users(
name="collaborators",
triggering_comment=triggering_comment,
search_fn=pr.search_collaborator,
testing_json=args.testing_collaborators_json,
)
def check_mentionable_users(pr, triggering_comment, args):
return search_users(
name="mentionable users",
triggering_comment=triggering_comment,
search_fn=pr.search_mentionable_users,
testing_json=args.testing_mentionable_users_json,
)
AUTH_CHECKS = {
"metionable_users": check_mentionable_users,
"collaborators": check_collaborator,
"author": check_author,
}
AUTH_CHECKS = {k: (k, v) for k, v in AUTH_CHECKS.items()}
class Merge:
triggers = [
"merge",
"merge this",
"merge this pr",
]
auth = [AUTH_CHECKS["collaborators"], AUTH_CHECKS["author"]]
@staticmethod
def run(pr: PR):
info = None
try:
info = pr.merge_if_passed_checks()
except Exception as e:
pr.comment_failure("Failed to process merge request", e)
raise e
if info is not None:
try:
pr.trigger_gha_ci(sha=info["sha"])
except Exception as e:
pr.comment_failure("Failed to trigger GitHub Actions", e)
raise e
class Rerun:
triggers = [
"rerun",
"rerun ci",
"re-run",
"re-run ci",
"run",
"run ci",
]
auth = [AUTH_CHECKS["metionable_users"]]
@staticmethod
def run(pr: PR):
errors = []
try:
pr.rerun_jenkins_ci()
except Exception as e:
errors.append(e)
try:
pr.rerun_github_actions()
except Exception as e:
errors.append(e)
if len(errors) > 0:
pr.comment_failure("Failed to re-run CI", errors)
if __name__ == "__main__":
help = "Check if a PR has comments trying to merge it, and do so based on reviews/CI status"
parser = argparse.ArgumentParser(description=help)
parser.add_argument("--remote", default="origin", help="ssh remote to parse")
parser.add_argument("--pr", required=True, help="pr number to check")
parser.add_argument("--run-url", required=Tr |
ue, help="workflow run URL")
parser.add_argument(
"--trigger-comment-json", required=True, help="json of the comment that triggered this run"
)
parser.add_argument("--testing-pr-json", help="(testing only) manual data for testing")
parser.add_argument(
"--testing-collaborators-json", help="(testing only) manual data for testing"
)
parser.add_argument(
"--testing-mentionable-users-json", help="(testing only) manual data for testing"
)
parser.add_argument(
"--dry-run",
action="store_true",
default=False,
help="run but don't send any request to GitHub",
)
args = parser.parse_args()
init_log()
comment = json.loads(args.trigger_comment_json)
body = comment["body"].strip()
if not body.startswith("@tvm-bot "):
logging.info(f"Not a bot comment, '{body}' does not start with '@tvm-bot'")
exit(0)
user_command = body.lstrip("@tvm-bot").strip()
command_to_run = None
for command in [Merge, Rerun]:
if user_command in command.triggers:
command_to_run = command
break
if command_to_run is None:
logging.info(f"Command '{user_command}' did not match anything")
exit(0)
remote = git(["config", "--get", f"remote.{args.remote}.url"])
logging.info(f"Using remote remote={remote}")
owner, repo = parse_remote(remote)
if args.pr.strip() == "":
logging.info("No PR number passed")
exit(0)
logging.info(f"Checking owner={owner} repo={repo}")
if args.testing_pr_json:
pr = PR(
number=int(args.pr),
owner=owner,
repo=repo,
dry_run=args.dry_run,
raw_data=json.loads(args.testing_pr_json),
)
else:
pr = PR(number=int(args.pr), owner=owner, repo=repo, dry_run=args.dry_run)
for name, check in command_to_run.auth:
if check(pr, comment, args):
logging.info(f"Passed auth check '{name}', continuing") |
break
else:
logging.info(f"Failed auth check '{name}', quitting")
pr.react(comment, "confused")
exit(0)
pr.react(comment, "+1")
state = pr.state()
if state != "OPEN":
logging.info(f"Ignoring event on PR, state was not OPEN, instead was state={state}")
exit(0)
command_to_run.run(pr)
if pr.has_error:
raise RuntimeError("PR commented a failure") |
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import logging
from urllib import request
from typing import Dict, Any, Optional, Tuple
def get(url: str, headers: Optional[Dict[str, str]] = None) -> Tuple[Dict[str, Any], Dict[str, str]]:
logging.info(f"Requesting GET to {url}")
if headers is None:
headers = {}
req = request.Request(url, headers=headers)
with request.urlopen(req) as response:
response_headers = {k: v for k, v in response.getheaders()}
response = json.loads(response.read())
return response, response_headers
import argparse
import re
import logging
import datetime
import os
import json
from urllib import error
from typing import List, Dict, Any, Optional, Callable
from git_utils import git, parse_remote, GitHubRepo
from cmd_utils import REPO_ROOT, init_log, Sh
from should_rebuild_docker import docker_api
JENKINSFILE = REPO_ROOT / "ci" / "jenkins" / "Jenkinsfile.j2"
GENERATED_JENKINSFILE = REPO_ROOT / "Jenkinsfile"
GENERATE_SCRIPT = REPO_ROOT / "ci" / "jenkins" / "generate.py"
GITHUB_TOKEN = os.environ["GITHUB_TOKEN"]
BRANCH = "nightly-docker-update"
def _testing_docker_api(data: Dict[str, Any]) -> Callable[[str], Dict[str, Any]]:
"""Returns a function that can be used in place of docker_api"""
def mock(url: str) -> Dict[str, Any]:
if url in data:
return data[url]
else:
raise error.HTTPError(url, 404, f"Not found: {url}", {}, None)
return mock
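# Illustrative sketch (added for clarity, not part of the original script): the mock returned by
# _testing_docker_api() stands in for docker_api() in tests -- known URLs return canned data,
# anything else raises a 404. The URL and payload below are made up.
def _example_testing_docker_api():
    mock = _testing_docker_api({"repositories/tlcpackstaging/ci_cpu/tags": {"results": []}})
    return mock("repositories/tlcpackstaging/ci_cpu/tags")  # -> {"results": []}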
def parse_docker_date(d: str) -> datetime.datetime:
"""Turn a date string from the Docker API into a datetime object"""
return datetime.datetime.strptime(d, "%Y-%m-%dT%H:%M:%S.%fZ")
def check_tag(tag: Dict[str, Any]) -> bool:
return re.match(r"^[0-9]+-[0-9]+-[a-z0-9]+$", tag["name"]) is not None
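# Illustrative sketch (added for clarity, not part of the original script): CI image tags follow
# the <date>-<time>-<shorthash> scheme that check_tag() accepts, and Docker Hub timestamps parse
# with parse_docker_date(). The sample values are made up.
def _example_tag_helpers():
    assert check_tag({"name": "20221013-060115-61c9742ea"})
    assert not check_tag({"name": "latest"})
    return parse_docker_date("2022-10-13T06:01:15.000000Z")  # -> datetime(2022, 10, 13, 6, 1, 15)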
def latest_tag(user: str, repo: str) -> Dict[str, Any]:
"""
Queries Docker Hub and finds the most recent tag for the specified image/repo pair
"""
r = docker_api(f"repositories/{user}/{repo}/tags")
results = r["results"]
for result in results:
result["last_updated"] = parse_docker_date(result["last_updated"])
results = list(sorted(results, key=lambda d: d["last_updated"]))
results = [tag for tag in results if check_tag(tag)]
return results[-1]
def latest_tlcpackstaging_image(source: str) -> Optional[str]:
"""
Finds the latest full tag to use in the Jenkinsfile or returns None if no
update is needed
"""
name, current_tag = source.split(":")
user, repo = name.split("/")
logging.info(
f"Running with name: {name}, current_tag: {current_tag}, user: {user}, repo: {repo}"
)
staging_repo = repo.replace("-", "_")
latest_tlcpackstaging_tag = latest_tag(user="tlcpackstaging", repo=staging_repo)
logging.info(f"Found latest tlcpackstaging tag:\n{latest_tlcpackstaging_tag}")
if latest_tlcpackstaging_tag["name"] == current_tag:
logging.info("tlcpackstaging tag is the same as the one in the Jenkinsfile")
latest_tlcpack_tag = latest_tag(user="tlcpack", repo=repo)
logging.info(f"Found latest tlcpack tag:\n{latest_tlcpack_tag}")
if latest_tlcpack_tag["name"] == latest_tlcpackstaging_tag["name"]:
logging.info("Tag names were the same, no update needed")
return None
if latest_tlcpack_tag["last_updated"] > latest_tlcpackstaging_tag["last_updated"]:
new_spec = f"tlcpack/{repo}:{latest_tlcpack_tag['name']}"
else:
new_spec = f"tlcpack/{repo}:{latest_tlcpackstaging_tag['name']}"
logging.info("Using tlcpackstaging tag on tlcpack")
logging.info(f"Found newer image, using: {new_spec}")
return new_spec
if __name__ == "__main__":
init_log()
help = "Open a PR to update the Docker images to use the latest available in tlcpackstaging"
parser = argparse.ArgumentParser(description=help)
parser.add_argument("--remote", default="origin", help="ssh remote to parse")
parser.add_argument("--dry-run", action="store_true", help="don't send PR to GitHub")
parser.add_argument("--testing-docker-data", help="JSON data to mock Docker Hub API response")
args = parser.parse_args()
if args.testing_docker_data is not None:
docker_api = _testing_docker_api(data=json.loads(args.testing_docker_data))
remote = git(["config", "--get", f"remote.{args.remote}.url"])
user, repo = parse_remote(remote)
logging.info(f"Reading {JENKINSFILE}")
with open(JENKINSFILE) as f:
content = f.readlines()
new_content = []
replacements = {}
for line in content:
m = re.match(r"^(ci_[a-zA-Z0-9]+) = \'(.*)\'", line.strip())
if m is not None:
logging.info(f"Found match on line {line.strip()}")
groups = m.groups()
new_image = latest_tlcpackstaging_image(groups[1])
if new_image is None:
logging.info("No new image found")
new_content.append(line)
else:
logging.info(f"Using new image {new_image}")
new_line = f"{groups[0]} = '{new_image}'\n"
new_content.append(new_line)
replacements[line] = new_line
else:
new_content.append(line)
if args.dry_run:
logging.info(f"Dry run, would have written new content to {JENKINSFILE}")
else:
logging.info(f"Writing new content to {JENKINSFILE}")
with open(JENKINSFILE, "w") as f:
f.write("".join(new_content))
logging.info(f"Editing {GENERATED_JENKINSFILE}")
with open(GENERATED_JENKINSFILE) as f:
generated_content = f.read()
for original_line, new_line in replacements.items():
generated_content = generated_content.replace(original_line, new_line)
if args.dry_run:
print(f"Would have written:\n{generated_content}")
else:
with open(GENERATED_JENKINSFILE, "w") as f:
f.write(generated_content)
title = "[ci][docker] Nightly Docker image update"
body = "This bumps the Docker images to the latest versions from Docker Hub."
message = f"{title}\n\n\n{body}"
if args.dry_run:
logging.info("Dry run, would have committed Jenkinsfile")
else:
logging.info(f"Creating git commit")
git(["checkout", "-B", BRANCH])
git(["add", str(JENKINSFILE.relative_to(REPO_ROOT))])
git(["add", str(GENERATED_JENKINSFILE.relative_to(REPO_ROOT))])
git(["config", "user.name", "tvm-bot"])
git(["config", "user.email", "[email protected]"])
git(["commit", "-m", message])
git(["push", "--set-upstream", args.remote, BRANCH, "--force"])
logging.info(f"Sending PR to GitHub")
github = GitHubRepo(user=user, repo=repo, token=GITHUB_TOKEN)
data = {
"title": title,
"body": body,
"head": BRANCH,
"base": "main",
"maintainer_can_m |
odify": True,
}
url = "pulls"
if args.dry_run:
logging.info(f"Dry run, would have sent {data} to {url}")
else:
try:
github.post(url, data=data)
except error.HTTPError as e:
if e.code == 422:
logging.info("PR already exists, ignoring error")
logging.exception(e)
else:
raise e
import os
import argparse
import re
import datetime
import json
import textwrap
from typing import Dict, Any, List
from git_utils import git, GitHubRepo, parse_remote
GIT_DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
def prs_query(user: str, repo: str, cursor: str = None):
after = ""
if cursor is not None:
after = f', before:"{cursor}"'
time_keys = "createdAt updatedAt lastEditedAt publishedAt"
return f"""
{{
repository(name: "{repo}", owner: "{user}") {{
pullRequests(states: [OPEN], last: 10{after}) {{
edges {{
cursor
}}
nodes {{
number
url
body
{time_keys}
isDraft
author {{
login
}}
reviews(last:100) {{
nodes {{
{time_keys}
bodyText
author {{ login }}
comments(last:100) {{
nodes {{
{time_keys}
bodyText
}}
}}
}}
}}
comments(last:100) {{
nodes {{
authorAssociation
bodyText
{time_keys}
author {{
login
}}
}}
}}
}}
}}
}}
}}
"""
def find_reviewers(body: str) -> List[str]:
matches = re.findall(r"(cc( @[-A-Za-z0-9]+)+)", body, flags=re.MULTILINE)
matches = [full for full, last in matches]
reviewers = []
for match in matches:
if match.startswith("cc "):
match = match.replace("cc ", "")
users = [x.strip() for x in match.split("@")]
reviewers += users
reviewers = set(x for x in reviewers if x != "")
return list(reviewers)
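# Illustrative sketch (added for clarity, not part of the original script): find_reviewers()
# pulls GitHub handles out of "cc @user" lines in a PR body; the order of the returned list is
# not guaranteed, so it is sorted here. The body text is made up.
def _example_find_reviewers():
    body = "Fix the importer\n\ncc @alice @bob-smith"
    return sorted(find_reviewers(body))  # -> ["alice", "bob-smith"]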
def check_pr(pr, wait_time, now):
last_action = None
author = pr["author"]["login"]
def update_last(new_time, description):
if isinstance(new_time, str):
new_time = datetime.datetime.strptime(new_time, GIT_DATE_FORMAT)
if new_time is None:
print(f" time not found: {description}")
return
nonlocal last_action
if last_action is None or new_time > last_action[0]:
last_action = (new_time, description)
def check_obj(obj, name):
update_last(obj["publishedAt"], f"{name} publishedAt: {obj}")
update_last(obj["updatedAt"], f"{name} updatedAt: {obj}")
update_last(obj["lastEditedAt"], f"{name} lastEditedAt: {obj}")
update_last(obj["createdAt"], f"{name} lastEditedAt: {obj}")
check_obj(pr, "pr")
reviews = pr["reviews"]["nodes"]
review_comments = []
for review in reviews:
review_comments += review["comments"]["nodes"]
check_obj(review, "review")
comments = pr["comments"]["nodes"] + review_comments
for comment in comments:
check_obj(comment, "comment")
time_since_last_action = now - last_action[0]
pr_body_reviewers = find_reviewers(pr["body"])
cc_reviewers = [find_reviewers(c["bodyText"]) for c in comments]
cc_reviewers = [r for revs in cc_reviewers for r in revs]
review_reviewers = list(set(r["author"]["login"] for r in reviews))
reviewers = cc_reviewers + review_reviewers + pr_body_reviewers
reviewers = list(set(reviewers))
reviewers = [r for r in reviewers if r != author]
if time_since_last_action > wait_time:
print(
" Pinging reviewers",
reviewers,
"on",
pr["url"],
"since it has been",
time_since_last_action,
f"since anything happened on that PR (last action: {last_action[1]})",
)
return reviewers
else:
print(
f" Not pinging PR {pr['number']} since it has been only {time_since_last_action} since the last action: {last_action[1]}"
)
return None
def make_ping_message(pr, reviewers):
reviewers = [f"@{r}" for r in reviewers]
author = f'@{pr["author |
"]["login"]}'
text = (
"It has been a while since this PR was updated, "
+ " ".join(reviewers)
+ " please leave a review or address the outstanding comments. "
+ f"{author} if this PR is still a work in progress, please [convert it to a draft](https:
" until it is ready for review."
)
return text
if __name__ == "__main__":
help = "Comment on languishing issues and PRs"
parser = argparse.ArgumentParser(description=help)
parser.add_argument("--remote", default="origin", help="ssh remote to parse")
parser.add_argument("--wait-time-minutes", required=True, type=int, help="ssh remote to parse")
parser.add_argument("--cutoff-pr-number", default=0, type=int, help="ssh remote to parse")
parser.add_argument("--dry-run", action="store_true", help="don't update GitHub")
parser.add_argument("--pr-json", help="(testing) data for testing to use instead of GitHub")
parser.add_argument("--now", help="(testing) custom string for current time")
args = parser.parse_args()
remote = git(["config", "--get", f"remote.{args.remote}.url"])
user, repo = parse_remote(remote)
wait_time = datetime.timedelta(minutes=int(args.wait_time_minutes))
cutoff_pr_number = int(args.cutoff_pr_number)
print(
"Running with:\n"
f" time cutoff: {wait_time}\n"
f" number cutoff: {cutoff_pr_number}\n"
f" dry run: {args.dry_run}\n"
f" user/repo: {user}/{repo}\n",
end="",
)
if args.pr_json:
r = json.loads(args.pr_json)
else:
github = GitHubRepo(token=os.environ["GITHUB_TOKEN"], user=user, repo=repo)
q = prs_query(user, repo)
r = github.graphql(q)
now = datetime.datetime.utcnow()
if args.now:
now = datetime.datetime.strptime(args.now, GIT_DATE_FORMAT)
while True:
prs = r["data"]["repository"]["pullRequests"]["nodes"]
prs_to_check = []
for pr in prs:
if pr["isDraft"]:
print(f"Skipping
elif pr["number"] <= cutoff_pr_number:
pr |
int(
f"Skipping
)
else:
print(f"Checking
prs_to_check.append(pr)
print(f"Summary: Checking {len(prs_to_check)} of {len(prs)} fetched")
for pr in prs_to_check:
print("Checking", pr["url"])
reviewers = check_pr(pr, wait_time, now)
if reviewers is not None:
message = make_ping_message(pr, reviewers)
if args.dry_run:
print(
f"Would have commented on {pr['number']}:\n{message}"
)
else:
r = github.post(f"issues/{pr['number']}/comments", {"body": message})
print(r)
edges = r["data"]["repository"]["pullRequests"]["edges"]
if len(edges) == 0:
break
cursor = edges[0]["cursor"]
r = github.graphql(prs_query(user, repo, cursor))
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
import io
import argparse
from contextlib import redirect_stdout
class NodeidsCollector:
def pytest_collection_modifyitems(self, items):
self.nodeids = [item.nodeid for item in items]
def main(folder):
collector = NodeidsCollector()
f = io.StringIO()
with redirect_stdout(f):
pytest.main(["-qq", "--collect-only", folder], plugins=[collector])
for nodeid in collector.nodeids:
print(nodeid)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="List pytest nodeids for a folder")
parser.add_argument("--folder", required=True, help="test folder to inspect")
args = parser.parse_args()
main(args.folder)
import argparse
import textwrap
import junitparser
import traceback
from pathlib import Path
from typing import List, Optional
import os
import urllib.parse
import logging
from cmd_utils import init_log
REPO_ROOT = Path(__file__).resolve().parent.parent.parent
def lstrip(s: str, prefix: str) -> str:
if s.startswith(prefix):
s = s[len(prefix) :]
return s
def classname_to_file(classname: str) -> str:
classname = lstrip(classname, "cython.")
classname = lstrip(classname, "ctypes.")
return classname.replace(".", "/") + ".py"
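# Illustrative sketch (added for clarity, not part of the original script): JUnit classnames from
# the ctypes/cython test runs map back to a file path like this. The classname is made up.
def _example_classname_to_file():
    return classname_to_file("ctypes.tests.python.unittest.test_example")
    # -> "tests/python/unittest/test_example.py"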
def failed_test_ids() -> List[str]:
FAILURE_TYPES = (junitparser.Failure, junitparser.Error)
junit_dir = REPO_ROOT / "build" / "pytest-results"
failed_node_ids = []
for junit in junit_dir.glob("*.xml"):
xml = junitparser.JUnitXml.fromfile(str(junit))
for suite in xml:
for case in suite:
if case.result is None:
logging.warn(f"Incorrectly formatted JUnit found, result was None on {case}")
continue
if len(case.result) > 0 and isinstance(case.result[0], FAILURE_TYPES):
node_id = classname_to_file(case.classname) + "::" + case.name
failed_node_ids.append(node_id)
return list(set(failed_node_ids))
def repro_command(build_type: str, failed_node_ids: List[str]) -> Optional[str]:
"""
Parse available JUnit XML files and output a command that users can run to
reproduce CI failures locally
"""
test_args = [f"--tests {node_id}" for node_id in failed_node_ids]
test_args_str = " ".join(test_args)
return f"python3 tests/scripts/ci.py {build_type} {test_args_str}"
def make_issue_url(failed_node_ids: List[str]) -> str:
names = [f"`{node_id}`" for node_id in failed_node_ids]
run_url = os.getenv("RUN_DISPLAY_URL", "<insert run URL>")
test_bullets = [f" - `{node_id}`" for node_id in failed_node_ids]
params = {
"labels": "test: flaky",
"title": "[Flaky Test] " + ", ".join(names),
"body": textwrap.dedent(
f"""
These tests were found to be flaky (intermittently failing on `main` or failed in a PR with
unrelated changes). See [the docs](https:
"""
)
+ "\n".join(test_bullets)
+ f"\n\n
}
return "https:
def show_failure_help(failed_suites: List[str]) -> None:
failed_node_ids = failed_test_ids()
if len(failed_node_ids) == 0:
return
build_type = os.getenv("PLATFORM")
if build_type is None:
raise RuntimeError("build type was None, cannot show command")
repro = repro_command(build_type=build_type, failed_node_ids=failed_node_ids)
if repro is None:
print("No test failures detected")
return
print(f"Report flaky test shortcut: {make_issue_url(failed_node_ids)}")
print("=============================== PYTEST FAILURES ================================")
print(
"These pytest suites failed to execute. The results can be found in the "
"Jenkins 'Tests' tab or by scrolling up through the raw logs here. "
"If there is no test listed below, the failure likely came from a segmentation "
"fault which you can find in the logs above.\n"
)
if failed_suites is not None and len(failed_suites) > 0:
print("\n".join([f" - {suite}" for suite in failed_suites]))
print("")
print("You can reproduce these specific failures locally with this command:\n")
print(textwrap.indent(repro, prefix=" "))
print("")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Print information about a failed pytest run")
args, other = parser.parse_known_args()
init_log()
try:
show_failure_help(failed_suites=other)
except Exception as e:
logging.exception(e)
import argparse
import datetime
import json
import logging
import subprocess
from typing import Dict, Any, List, Tuple
from http_utils import get
from cmd_utils import Sh, init_log
DOCKER_API_BASE = "https://hub.docker.com/v2/"
PAGE_SIZE = 25
TEST_DATA = None
def docker_api(url: str) -> Dict[str, Any]:
"""
Run a paginated fetch from the public Docker Hub API
"""
if TEST_DATA is not None:
return TEST_DATA[url]
pagination = f"?page_size={PAGE_SIZE}&page=1"
url = DOCKER_API_BASE + url + pagination
r, headers = get(url)
reset = headers.get("x-ratelimit-reset")
if reset is not None:
reset = datetime.datetime.fromtimestamp(int(reset))
reset = reset.isoformat()
logging.info(
f"Docker API Rate Limit: {headers.get('x-ratelimit-remaining')} / {headers.get('x-ratelimit-limit')} (reset at {reset})"
)
if "results" not in r:
raise RuntimeError(f"Error fetching data, no results found in: {r}")
return r
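# Illustrative sketch (added for clarity, not part of the original script): when TEST_DATA is
# populated (as --testing-docker-data does below), docker_api() short-circuits and never touches
# the network. The URL and payload are made up.
def _example_docker_api_offline():
    global TEST_DATA
    TEST_DATA = {"repositories/tlcpack": {"results": [{"name": "ci-cpu"}]}}
    return docker_api("repositories/tlcpack")  # -> {"results": [{"name": "ci-cpu"}]}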
def any_docker_changes_since(hash: str) -> Tuple[bool, str]:
"""
Check the docker/ directory, return True if there have been any code changes
since the specified hash
"""
sh = Sh()
cmd = f"git diff {hash} -- docker/"
proc = sh.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout = proc.stdout.strip()
return stdout != "", stdout
def does_commit_exist(hash: str) -> bool:
"""
Returns True if the hash exists in the repo
"""
sh = Sh()
cmd = f"git rev-parse -q {hash}"
proc = sh.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, check=False)
print(proc.stdout)
if proc.returncode == 0:
return True
if "unknown revision or path not in the working tree" in proc.stdout:
return False
raise RuntimeError(f"Unexpected failure when running: {cmd}")
def find_hash_for_tag(tag: Dict[str, Any]) -> str:
"""
Split the hash off of a name like <date>-<time>-<hash>
"""
name = tag["name"]
name_parts = name.split("-")
if len(name_parts) != 3:
raise RuntimeError(f"Image {name} is not using new naming scheme")
shorthash = name_parts[2]
return shorthash
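# Illustrative sketch (added for clarity, not part of the original script): tags follow the
# <date>-<time>-<hash> naming scheme, so the short git hash is the third dash-separated field.
# The sample tag is made up.
def _example_find_hash_for_tag():
    return find_hash_for_tag({"name": "20221013-060115-61c9742ea"})  # -> "61c9742ea"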
def find_commit_in_repo(tags: List[Dict[str, Any]]):
"""
Look through all the docker tags, find the most recent one which references
a commit that is present in the repo
"""
for tag in tags["results"]:
shorthash = find_hash_for_tag(tag)
logging.info(f"Hash '{shorthash}' does not exist in repo")
if does_commit_exist(shorthash):
return shorthash, tag
raise RuntimeError(f"No extant hash found in tags:\n{tags}")
def main():
images = docker_api("repositories/tlcpack")
relevant_images = [image for image in images["results"] if image["name"].startswith("ci-")]
image_names = [image["name"] for image in relevant_images]
logging.info(f"Found {len(relevant_images)} images to check: {', '.join(image_names)}")
for image in relevant_images:
tags = docker_api(f"repositories/tlcpack/{image['name']}/tags")
shorthash, tag = find_commit_in_repo(tags)
name = tag["name"]
logging.info(f"Looking for docker/ changes since {shorthash}")
any_docker_changes, diff = any_docker_changes_since(shorthash)
if any_docker_changes:
logging.info(f"Found docker changes from {shorthash} when checking {name}")
logging.info(diff)
exit(2)
logging.info("Did not find changes, no rebuild necessary")
exit(0)
if __name__ == "__main__":
init_log()
parser = argparse.ArgumentParser(
description="Exits 0 if Docker images don't need to be rebuilt, 1 otherwise"
)
parser.add_argument(
"--testing-docker-data",
help="(testing only) JSON data to mock response from Docker Hub API",
)
args = parser.parse_args()
if args.testing_docker_data is not None:
TEST_DATA = json.loads(args.testing_docker_data)
main()
import os
import json
import argparse
import subprocess
import re
import textwrap
from urllib import request
from typing import Dict, Tuple, Any, List, Optional
from git_utils import GitHubRepo, parse_remote, git
SLOW_TEST_TRIGGERS = [
"@tvm-bot run slow tests",
"@tvm-bot run slow test",
"@tvm-bot run slow",
"@tvm-bot slow tests",
"@tvm-bot slow test",
"@tvm-bot slow",
]
def check_match(s: str, searches: List[str]) -> Tuple[bool, Optional[str]]:
for search in searches:
if search in s:
return True, search
return False, None
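# Illustrative sketch (added for clarity, not part of the original script): how a PR body is
# matched against the trigger phrases above.
def _example_check_match():
    return check_match("@tvm-bot run slow tests please", SLOW_TEST_TRIGGERS)
    # -> (True, "@tvm-bot run slow tests")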
def display(long_str: str) -> str:
return textwrap.indent(long_str, " ")
if __name__ == "__main__":
help = "Exits with 1 if CI should run slow tests, 0 otherwise"
parser = argparse.ArgumentParser(description=help)
parser.add_argument("--pr", required=True)
parser.add_argument("--remote", default="origin", help="ssh remote to parse")
parser.add_argument(
"--pr-body", help="(testing) PR body to use instead of fetching from GitHub"
)
args = parser.parse_args()
branch = git(["rev-parse", "--abbrev-ref", "HEAD"])
skip_branches = {"main", "ci-docker-staging"}
if branch in skip_branches:
print(f"Branch {branch} is in {skip_branches}, running slow tests")
exit(1)
print(f"Branch {branch} is not in {skip_branches}, checking last commit...")
if args.pr_body:
body = args.pr_body
else:
remote = git(["config", "--get", f"remote.{args.remote}.url"])
user, repo = parse_remote(remote)
github = GitHubRepo(token=os.environ["GITHUB_TOKEN"], user=user, repo=repo)
pr = github.get(f"pulls/{args.pr}")
body = pr["body"]
body_match, reason = check_match(body, SLOW_TEST_TRIGGERS)
if body_match:
print(f"Matched {reason} in PR body:\n{display(body)}, running slow tests")
exit(1)
print(
f"PR Body:\n{display(body)}\ndid not have any of {SLOW_TEST_TRIGGERS}, skipping slow tests"
)
exit(0)
import os
import json
import argparse
import tempfile
from typing import Any, Dict
from git_utils import git, GitHubRepo, parse_remote
_commit_query_fields = """
messageHeadline
oid
statusCheckRollup {
contexts(last:100) {
nodes {
... on CheckRun {
conclusion
status
name
checkSuite {
workflowRun {
workflow {
name
}
}
}
}
... on StatusContext {
context
state
}
}
}
}
"""
def commits_query(user: str, repo: str, cursor: str = None):
"""
Create a GraphQL query to find the last N commits along with their statuses
and some metadata (paginated after 'cursor')
"""
after = ""
if cursor is not None:
after = f', after:"{cursor}"'
return f"""
{{
repository(name: "{repo}", owner: "{user}") {{
defaultBranchRef {{
target {{
... on Commit {{
history(first: 15{after}) {{
edges {{ cursor }}
nodes {{
{_commit_query_fields}
}}
}}
}}
}}
}}
}}
}}
"""
def commit_passed_ci(commit: Dict[str, Any]) -> bool:
"""
Returns true if all of a commit's statuses are SUCCESS
"""
statuses = commit["statusCheckRollup"]["contexts"]["nodes"]
unified_statuses = []
for status in statuses:
if "context" in status:
unified_statuses.append((status["context"], status["state"] == "SUCCESS"))
else:
workflow = status["checkSuite"]["workflowRun"]["workflow"]["name"]
name = f"{workflow} / {status['name']}"
unified_statuses.append((name, status["conclusion"] == "SUCCESS"))
print(f"Statuses on {commit['oid']}:", json.dumps(unified_stat |
uses, indent=2))
expected_jobs = {"tvm-ci/branch"}
job_names = {name for name, status in unified_statuses}
for job in expected_jobs:
if job not in job_names:
return False
passed_ci = all(status for name, status in unified_statuses)
return passed_ci
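# Illustrative sketch (added for clarity, not part of the original script): a minimal commit
# payload that commit_passed_ci() accepts -- the expected "tvm-ci/branch" status is present and
# SUCCESS. The oid is made up.
def _example_commit_passed_ci():
    commit = {
        "oid": "abc123",
        "statusCheckRollup": {
            "contexts": {"nodes": [{"context": "tvm-ci/branch", "state": "SUCCESS"}]}
        },
    }
    return commit_passed_ci(commit)  # -> True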
def update_branch(user: str, repo: str, sha: str, branch_name: str) -> None:
git(["fetch", "origin", sha])
git(["reset", "--hard", "FETCH_HEAD"])
try:
git(["branch", "-D", branch_name])
except RuntimeError:
pass
git(["checkout", "-b", branch_name])
git(["push", "origin", "--force", branch_name])
print(f"Pushed branch {branch_name} with commit {sha}")
if __name__ == "__main__":
help = "Push the a branch to the last commit that passed all CI runs"
parser = argparse.ArgumentParser(description=help)
parser.add_argument("--remote", default="origin", help="ssh remote to parse")
parser.add_argument("--dry-run", action="store_true", help="don't submit to GitHub")
parser.add_argument("--branch", default="last-successful", help="branch name")
parser.add_argument(
"--testonly-json", help="(testing) data to use instead of fetching from GitHub"
)
args = parser.parse_args()
remote = git(["config", "--get", f"remote.{args.remote}.url"])
user, repo = parse_remote(remote)
user, repo = ("apache", "tvm")
if args.testonly_json:
r = json.loads(args.testonly_json)
else:
github = GitHubRepo(token=os.environ["GITHUB_TOKEN"], user=user, repo=repo)
q = commits_query(user, repo)
r = github.graphql(q)
commits = r["data"]["repository"]["defaultBranchRef"]["target"]["history"]["nodes"]
MAX_COMMITS_TO_CHECK = 50
i = 0
while i < MAX_COMMITS_TO_CHECK:
for commit in commits:
if commit_passed_ci(commit):
print(f"Found last good commit: {commit['oid']}: {commit['messageHeadline']}")
if not args.dry_run:
update_branch(
user=user,
repo=repo,
sha=commit["oid"],
branch_name=args.branch,
)
exit(0)
edges = r["data"]["repository"]["defaultBranchRef"]["target"]["history"]["edges"]
if len(edges) == 0:
break
else:
q = commits_query(user, repo, cursor=edges[-1]["cursor"])
r = github.graphql(q)
commits = r["data"]["repository"]["defaultBranchRef"]["target"]["history"]["nodes"]
i += len(commits)
print(f"No good commits found in the last {len(commits)} commits")
exit(1)
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import sys
import subprocess
from jinja2 import Template
CUDA_VERSIONS = ["10.0", "9.0"]
# Make sure that the cudnn version you set here is available
# for all the cuda versions that you want both from nvidia
# and from conda.
# These two must be in sync
CUDNN_FULL_VERSION = "7.6.0.64"
CUDNN_VERSION = "7.6.0"
condadir = os.path.dirname(sys.argv[0])
condadir = os.path.abspath(condadir)
srcdir = os.path.dirname(condadir)
with open(os.path.join(condadir, "Dockerfile.template")) as f:
docker_template = Template(f.read())
def render_dockerfile(version):
txt = docker_template.render(
cuda_version=version, cudnn_short_version=CUDNN_VERSION, cudnn_version=CUDNN_FULL_VERSION
)
fname = os.path.join(condadir, "../docker/Dockerfile.conda_cuda" + version.replace(".", ""))
with open(fname, "w") as f:
f.write(txt + "\n")
return fname
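# Illustrative sketch (added for clarity, not part of the original script): how the Jinja2
# substitution behaves, using a one-line inline template rather than the real
# Dockerfile.template (whose contents differ).
def _example_render():
    template = Template("FROM nvidia/cuda:{{ cuda_version }}-cudnn{{ cudnn_short_version }}-devel")
    return template.render(cuda_version="10.0", cudnn_short_version=CUDNN_VERSION)
    # -> "FROM nvidia/cuda:10.0-cudnn7.6.0-devel"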
if __name__ == "__main__":
build_versions = CUDA_VERSIONS
if len(sys.argv) > 1:
build_versions = sys.argv[1:]
for version in build_versions:
render_dockerfile(version)
import hashlib
import pytest
import sys
import os
from pathlib import Path
pytest_plugins = ["tvm.testing.plugin"]
IS_IN_CI = os.getenv("CI", "") == "true"
REPO_ROOT = Path(__file__).resolve().parent
_slowest_tests = [
"tests/python/frontend/tensorflow/test_forward.py::test_forward_broadcast_args",
"tests/python/frontend/tensorflow/test_forward.py::test_forward_broadcast_to",
"tests/python/topi/python/test_topi_conv2d_int8.py::test_conv2d_nchw[int8]",
"tests/python/topi/python/test_topi_conv2d_int8.py::test_conv2d_nchw[uint8]",
"tests/python/topi/python/test_topi_upsampling.py::test_upsampling3d",
"tests/python/topi/python/test_topi_upsampling.py::test_upsampling3d",
"tests/python/topi/python/test_topi_conv2d_int8.py::test_conv2d_nchw[int8]",
"tests/python/frontend/tflite/test_forward.py::test_all_elemwise",
"tests/python/frontend/pytorch/test_object_detection.py::test_detection_models",
"tests/python/topi/python/test_topi_conv2d_int8.py::test_conv2d_nchw[uint8]",
"tests/python/topi/python/test_topi_conv2d_NCHWc.py::test_conv2d_NCHWc",
"tests/python/topi/python/test_topi_conv2d_hwnc_tensorcore.py::test_conv2d_hwnc_tensorcore",
"tests/python/contrib/test_tensorrt.py::test_binary[compile]",
"tests/python/frontend/pytorch/test_forward.py::test_segmentation_models",
"tests/python/topi/python/test_topi_conv2d_NCHWc.py::test_conv2d_NCHWc",
"tests/python/relay/test_py_converter.py::test_global_recursion",
"tests/python/frontend/tensorflow/test_forward.py::test_forward_ptb",
"tests/python/relay/test_op_level6.py::test_topk",
"tests/python/topi/python/test_topi_conv2d_winograd.py::test_conv2d_nchw",
"tests/python/relay/test_py_converter.py::test_global_recursion",
]
HARDCODED_ALLOCATIONS = {}
for idx, test in enumerate(_slowest_tests):
HARDCODED_ALLOCATIONS[test] = idx
FIXED_ALLOCATION_PREFIXES = {
"tests/python/unittest/test_tvm_testing_features.py": 0,
}
def find_shard_index(nodeid: str, num_shards: int) -> int:
"""
Return the index of the shard that should run this test
" |
""
for prefix, target_shard_idx in FIXED_ALLOCATION_PREFIXES.items():
if nodeid.startswith(prefix):
if target_shard_idx >= num_shards:
raise RuntimeError(
f"Cannot collect sharded tests, {nodeid} has hardcoded shard index {target_shard_idx} among only {num_shards} shards"
)
return target_shard_idx
if nodeid in HARDCODED_ALLOCATIONS:
hash = HARDCODED_ALLOCATIONS[nodeid]
else:
hash = hashlib.md5(nodeid.encode())
hash = int(hash.hexdigest(), 16)
return hash % num_shards
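# Illustrative sketch (added for clarity, not part of the original conftest.py): sharding is a
# pure function of the node id, so every shard computes the same assignment. The node id is
# made up.
def _example_find_shard_index():
    nodeid = "tests/python/unittest/test_example.py::test_case"
    return [find_shard_index(nodeid, num_shards=4) for _ in range(2)]  # two identical indices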
def pytest_collection_modifyitems(config, items):
if not all(k in os.environ for k in ["CI", "TVM_NUM_SHARDS", "TVM_SHARD_INDEX"]):
return
num_shards = int(os.environ["TVM_NUM_SHARDS"])
shard_index = int(os.environ["TVM_SHARD_INDEX"])
print(f"Marking tests for shard {shard_index} of {num_shards}")
items_copy = list(items)
for item in items_copy:
item_shard_index = find_shard_index(item.nodeid, num_shards=num_shards)
if item_shard_index != shard_index:
items.remove(item)
def pytest_sessionstart():
if IS_IN_CI:
hook_script_dir = REPO_ROOT / "tests" / "scripts" / "request_hook"
sys.path.append(str(hook_script_dir))
import request_hook
request_hook.init()
import gc
import inspect
import os
from pathlib import Path
import re
import sys
import sphinx_gallery
curr_path = Path(__file__).expanduser().absolute().parent
if curr_path.name == "_staging":
tvm_path = Path(os.pardir, os.pardir)
else:
tvm_path = Path(os.pardir)
sys.path.insert(0, str(tvm_path.resolve() / "python"))
sys.path.insert(0, str(tvm_path.resolve() / "vta" / "python"))
sys.path.insert(0, str(tvm_path.resolve() / "docs"))
project = "tvm"
author = "Apache Software Foundation"
copyright = "2020 - 2022, %s" % author
github_doc_root = "https:
os.environ["TVM_BUILD_DOC"] = "1"
def git_describe_version(original_version):
"""Get git describe version."""
ver_py = tvm_path.joinpath("version.py")
libver = {"__file__": ver_py}
exec(compile(open(ver_py, "rb").read(), ver_py, "exec"), libver, libver)
_, gd_version = libver["git_describe_version"]()
if gd_version != original_version:
print("Use git describe based version %s" % gd_version)
return gd_version
import tvm
from tvm import topi
from tvm import te
from tvm import testing
version = git_describe_version(tvm.__version__)
release = version
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.napoleon",
"sphinx.ext.mathjax",
"sphinx_gallery.gen_gallery",
"autodocsumm",
]
templates_path = ["_templates"]
source_suffix = [".rst", ".md"]
autosummary_generate = True
main_doc = "index"
language = None
exclude_patterns = ["_build", "_staging"]
pygments_style = "sphinx"
todo_include_todos = False
html_theme = os.environ.get("TVM_THEME", "rtd")
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
if not on_rtd and html_theme == "rtd":
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_static_path = ["_static"]
html_theme_options = {
"analytics_id": "UA-75982049-2",
"logo_only": True,
}
html_logo = "_static/img/tvm-logo-small.png"
html_favicon = "_static/img/tvm-logo-square.png"
htmlhelp_basename = project + "doc"
latex_elements = {}
latex_documents = [
(main_doc, "%s.tex" % project, project, author, "manual"),
]
intersphinx_mapping = {
"python": ("https:
}
from sphinx_gallery.sorting import ExplicitOrder
examples_dirs = [
tvm_path.joinpath("gallery", "tutorial"),
tvm_path.joinpath("gallery", "how_to", "compile_models"),
tvm_path.joinpath("gallery", "how_to", "deploy_models"),
tvm_path.joinpath("gallery", "how_to", "work_with_relay"),
tvm_path.joinpath("gallery", "how_to", "work_with_schedules"),
tvm_path.joinpath("gallery", "how_to", "optimize_operators"),
tvm_path.joinpath("gallery", "how_to", "tune_with_autotvm"),
tvm_path.joinpath("gallery", "how_to", "tune_with_autoscheduler"),
tvm_path.joinpath("gallery", "how_to", "work_with_microtvm"),
tvm_path.joinpath("gallery", "how_to", "extend_tvm"),
tvm_path.joinpath("vta", "tutorials"),
]
gallery_dirs = [
"tutorial",
"how_to/compile_models",
"how_to/deploy_models",
"how_to/work_with_relay",
"how_to/work_with_schedules",
"how_to/optimize_operators",
"how_to/tune_with_autotvm",
"how_to/tune_with_autoscheduler",
"how_to/work_with_microtvm",
"how_to/extend_tvm",
"topic/vta/tutorials",
]
subsection_order = ExplicitOrder(
str(p)
for p in [
tvm_path / "vta" / "tutorials" / "frontend",
tvm_path / "vta" / "tutorials" / "optimize",
tvm_path / "vta" / "tutorials" / "autotvm",
]
)
within_subsection_order = {
"tutorial": [
"introduction.py",
"install.py",
"tvmc_command_line_driver.py",
"tvmc_python.py",
"autotvm_relay_x86.py",
"tensor_expr_get_started.py",
"autotvm_matmul_x86.py",
"auto_scheduler_matmul_x86.py",
"tensor_ir_blitz_course.py",
"topi.pi",
"cross_compilation_and_rpc.py",
"relay_quick_start.py",
"uma.py",
],
"compile_models": [
"from_pytorch.py",
"from_tensorflow.py",
"from_mxnet.py",
"from_onnx.py",
"from_keras.py",
"from_tflite.py",
"from_coreml.py",
"from_darknet.py",
"from_caffe2.py",
"from_paddle.py",
],
"work_wi |
th_schedules": [
"schedule_primitives.py",
"reduction.py",
"intrin_math.py",
"scan.py",
"extern_op.py",
"tensorize.py",
"tuple_inputs.py",
"tedd.py",
],
"optimize_operators": [
"opt_gemm.py",
"opt_conv_cuda.py",
"opt_conv_tensorcore.py",
],
"tune_with_autotvm": [
"tune_conv2d_cuda.py",
"tune_relay_cuda.py",
"tune_relay_x86.py",
"tune_relay_arm.py",
"tune_relay_mobile_gpu.py",
],
"tune_with_autoscheduler": [
"tune_matmul_x86.py",
"tune_conv2d_layer_cuda.py",
"tune_network_x86.py",
"tune_network_cuda.py",
],
"extend_tvm": [
"low_level_custom_pass.py",
"use_pass_infra.py",
"use_pass_instrument.py",
"bring_your_own_datatypes.py",
],
"micro": [
"micro_train.py",
"micro_autotune.py",
"micro_reference_vm.py",
"micro_tflite.py",
"micro_ethosu.py",
"micro_tvmc.py",
"micro_aot.py",
"micro_pytorch.py",
],
}
class WithinSubsectionOrder:
def __init__(self, src_dir):
self.src_dir = src_dir.split("/")[-1]
def __call__(self, filename):
if (
self.src_dir in within_subsection_order
and filename in within_subsection_order[self.src_dir]
):
index = within_subsection_order[self.src_dir].index(filename)
assert index < 1e10
return "\0%010d" % index
return filename
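# Illustrative sketch (added for clarity, not part of the original conf.py): filenames listed in
# within_subsection_order sort by their position (the "\0" prefix sorts before any plain name);
# everything else falls back to alphabetical order. The second filename is made up.
def _example_within_subsection_order():
    key = WithinSubsectionOrder("tutorial")
    return key("install.py"), key("unlisted_example.py")  # -> ("\x000000000001", "unlisted_example.py")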
def force_gc(gallery_conf, fname):
gc.collect()
sphinx_gallery_conf = {
"backreferences_dir": "gen_modules/backreferences",
"doc_module": ("tvm", "numpy"),
"reference_url": {
"tvm": None,
},
"examples_dirs": examples_dirs,
"within_subsection_order": WithinSubsectionOrder,
"gallery_dirs": gallery_dirs,
"subsection_order": subsection_order,
"filename_pattern": os.environ.get("TVM_TUTORIAL_EXE |
C_PATTERN", ".py"),
"download_all_examples": False,
"min_reported_time": 60,
"expected_failing_examples": [],
"reset_modules": ("matplotlib", "seaborn", force_gc),
"promote_jupyter_magic": True,
}
autodoc_default_options = {
"member-order": "bysource",
}
tvm_alias_check_map = {
"tvm.te": ["tvm.tir"],
"tvm.tir": ["tvm.ir", "tvm.runtime"],
"tvm.relay": ["tvm.ir", "tvm.tir"],
}
import tlcpack_sphinx_addon
footer_copyright = "© 2022 Apache Software Foundation | All rights reserved"
footer_note = " ".join(
"""
Copyright © 2022 The Apache Software Foundation. Apache TVM, Apache, the Apache feather,
and the Apache TVM project logo are either trademarks or registered trademarks of
the Apache Software Foundation.""".split(
"\n"
)
).strip()
header_logo = "https:
header_logo_link = "https:
header_links = [
("Community", "https:
("Download", "https:
("VTA", "https:
("Blog", "https:
("Docs", "https:
("Conference", "https:
("Github", "https:
]
header_dropdown = {
"name": "ASF",
"items": [
("Apache Homepage", "https:
("License", "https:
("Sponsorship", "https:
("Security", "https:
("Thanks", "https:
("Events", "https:
],
}
def fixup_tutorials(original_url: str) -> str:
if "docs/tutorial" in original_url:
if original_url.endswith("index.rst"):
return re.sub(
r"docs/tutorial/(.*)index\.rst", "gallery/tutorial/\\1README.txt", original_url
)
else:
return re.sub(r"docs/tutorial/(.*)\.rst", "gallery/tutorial/\\1.py", original_url)
else:
return original_url
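# Illustrative sketch (added for clarity, not part of the original conf.py): "edit this page"
# links for generated tutorial pages are rewritten back to their gallery sources; everything
# else passes through unchanged.
def _example_fixup_tutorials():
    return (
        fixup_tutorials("docs/tutorial/install.rst"),  # -> "gallery/tutorial/install.py"
        fixup_tutorials("docs/tutorial/index.rst"),  # -> "gallery/tutorial/README.txt"
        fixup_tutorials("docs/arch/runtime.rst"),  # unchanged
    )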
html_context = {
"footer_copyright": footer_copyright,
"footer_note": footer_note,
"header_links": header_links,
"header_dropdown": header_dropdown,
"header_logo": header_logo,
"header_logo_link": header_logo_link,
"version_prefixes": ["main", "v0.8.0/", "v0.9.0/", "v0.10.0/"],
"display_github": True,
"github_user": "apache",
"github_repo": "tvm",
"github_version": "main/docs/",
"theme_vcs_pageview_mode": "edit",
"edit_link_hook_fn": fixup_tutorials,
}
templates_path += [tlcpack_sphinx_addon.get_templates_path()]
html_static_path += [tlcpack_sphinx_addon.get_static_path()]
def update_alias_docstring(name, obj, lines):
"""Update the docstring of alia |
s functions.
This function checks if the obj is an alias of another documented object
in a different module.
If it is an alias, then it will append the alias information to the docstring.
Parameters
----------
name : str
The full name of the object in the doc.
obj : object
The original object.
lines : list
The docstring lines, need to be modified inplace.
"""
arr = name.rsplit(".", 1)
if len(arr) != 2:
return
target_mod, target_name = arr
if target_mod not in tvm_alias_check_map:
return
if not hasattr(obj, "__module__"):
return
obj_mod = obj.__module__
for amod in tvm_alias_check_map[target_mod]:
if not obj_mod.startswith(amod):
continue
if hasattr(sys.modules[amod], target_name):
obj_type = ":py:func" if callable(obj) else ":py:class"
lines.append(".. rubric:: Alias of %s:`%s.%s`" % (obj_type, amod, target_name))
def process_docstring(app, what, name, obj, options, lines):
"""Sphinx callback to process docstring"""
if callable(obj) or inspect.isclass(obj):
update_alias_docstring(name, obj, lines)
from legacy_redirect import build_legacy_redirect
def setup(app):
app.connect("autodoc-process-docstring", process_docstring)
app.connect("build-finished", build_legacy_redirect(tvm_path)) |
from string |
import Template |
import json |
import os
legacy_redirects = [
["dev/benchmark.html", "../arch/benchmark.html"],
["dev/convert_layout.html", "../arch/convert_layout.html"],
["dev/debugger.html", "../arch/debugger.html"],
["dev/device_target_interactions.html", "../arch/device_target_interactions.html"],
["dev/frontend/tensorflow.html", "../../arch/frontend/tensorflow.html"],
["dev/hybrid_script.html", "../arch/hybrid_script.html"],
["dev/index.html", "../arch/index.html"],
["dev/inferbound.html", "../arch/inferbound.html"],
[
"dev/introduction_to_module_serialization.html",
"../arch/introduction_to_module_serialization.html",
],
["dev/microtvm_design.html", "../arch/microtvm_design.html"],
["dev/model_library_format.html", "../arch/model_library_format.html"],
["dev/pass_infra.html", "../arch/pass_infra.html"],
["dev/relay_intro.html", "../arch/relay_intro.html"],
["dev/relay_op_strategy.html", "../arch/relay_op_strategy.html"],
["dev/runtime.html", "../arch/runtime.html"],
["dev/runtimes/vulkan.html", "../../arch/runtimes/vulkan.html"],
["dev/security.html", "../arch/security.html"],
["dev/virtual_machine.html", "../arch/virtual_machine.html"],
["dev/how_to.html", "index.html"],
["dev/pytest_target_parametrization.html", "how_to/pytest_target_parametrization.html"],
["dev/relay_add_op.html", "how_to/relay_add_op.html"],
["dev/relay_add_pass.html", "how_to/relay_add_pass.html"],
["dev/relay_bring_your_own_codegen.html", "how_to/relay_bring_your_own_codegen.html"],
["dev/codebase_walkthrough.html", "tutorial/codebase_walkthrough.html"],
["deploy/android.html", "../how_to/deploy/android.html"],
["deploy/arm_compute_lib.html", "../how_to/deploy/arm_compute_lib.html"],
["deploy/bnns.html", "../how_to/deploy/bnns.html"],
["deploy/cpp_deploy.html", "../how_to/deploy/cpp_deploy.html"],
["deploy/hls.html", "../how_to/deploy/hls.html"],
["deploy/index.html", "../how_to/deploy/index.html"],
["deploy/integrate.ht |
ml", "../how_to/deploy/integrate.html"],
["deploy/tensorrt.html", "../how_to/deploy/tensorrt.html"],
["deploy/vitis_ai.html", "../how_to/deploy/vitis_ai.html"],
["profiling/index.html", "../how_to/profile/index.html"],
["profiling/papi.html", "../how_to/profile/papi.html"],
["api/links.html", "../reference/api/links.html"],
["api/python/auto_scheduler.html", "../../reference/api/python/auto_scheduler.html"],
["api/python/autotvm.html", "../../reference/api/python/autotvm.html"],
["api/python/contrib.html", "../../reference/api/python/contrib.html"],
["api/python/driver.html", "../../reference/api/python/driver.html"],
["api/python/error.html", "../../reference/api/python/error.html"],
["api/python/graph_executor.html", "../../reference/api/python/graph_executor.html"],
["api/python/index.html", "../../reference/api/python/index.html"],
["api/python/ir.html", "../../reference/api/python/ir.html"],
["api/python/micro.html", "../../reference/api/python/micro.html"],
["api/python/ndarray.html", "../../reference/api/python/ndarray.html"],
["api/python/relay/analysis.html", "../../../reference/api/python/relay/analysis.html"],
["api/python/relay/backend.html", "../../../reference/api/python/relay/backend.html"],
[
"api/python/relay/dataflow_pattern.html",
"../../../reference/api/python/relay/dataflow_pattern.html",
],
["api/python/relay/frontend.html", "../../../reference/api/python/relay/frontend.html"],
["api/python/relay/image.html", "../../../reference/api/python/relay/image.html"],
["api/python/relay/index.html", "../../../reference/api/python/relay/index.html"],
["api/python/relay/nn.html", "../../../reference/api/python/relay/nn.html"],
["api/python/relay/testing.html", "../../../reference/api/python/relay/testing.html"],
["api/python/relay/transform.html", "../../../reference/api/python/relay/transform.html"],
["api/python/relay/vision.html", "../../../reference/api/python/relay/vision.html"], |