from .formatting import styled_warning

# ================================
# =            ABOUT             =
# ================================
INTRODUCTION_TITLE = """<h1 align="center">🏟️ Long Code Arena</h1>"""

INTRODUCTION_TEXT = """🏟️ **Long Code Arena** is a suite of benchmarks for code-related tasks with large contexts, up to a whole code repository.
It currently spans six different tasks and contains six datasets:

* 🤗 [Library-based code generation](https://huggingface.co/datasets/JetBrains-Research/lca-library-based-code-generation)
* 🤗 [CI builds repair](https://huggingface.co/datasets/JetBrains-Research/lca-ci-builds-repair)
* 🤗 [Project-level code completion](https://huggingface.co/datasets/JetBrains-Research/lca-project-level-code-completion)
* 🤗 [Commit message generation](https://huggingface.co/datasets/JetBrains-Research/lca-commit-message-generation)
* 🤗 [Bug localization](https://huggingface.co/datasets/JetBrains-Research/lca-bug-localization)
* 🤗 [Module summarization](https://huggingface.co/datasets/JetBrains-Research/lca-module-summarization)

We are excited to invite you to participate in solving our benchmarks! To submit your results, please send the following materials to our 📩 email ([email protected]):  

* **Results**: Include the summary of your benchmark outcomes.
* **Reproduction Package**: To ensure the integrity and reproducibility of your results, please include the code for context collection (if any), generation of predictions, and evaluation. You can follow [our baselines](https://github.com/JetBrains-Research/lca-baselines) as a reference.  
* **Metadata**: Model information, organization name, license of your model, context size, and any other information you find relevant.

We look forward to reviewing your innovative solutions!

"""

# ================================
# =         LEADERBOARD          =
# ================================
LEADERBOARD_TITLE = '<h2 align="center">🏅 Leaderboard</h2>'

LEADERBOARD_TEXT = """The raw results from the leaderboard are available in 🤗 [JetBrains-Research/lca-results](https://huggingface.co/datasets/JetBrains-Research/lca-results)."""

# ================================
# =          SUBMISSION          =
# ================================
SUBMISSION_TITLE = '<h2 align="center">📩 Make A Submission</h2>'

SUBMISSION_TEXT_INTRO = """Use the form below to submit new results to 🏟️ Long Code Arena. If any problems arise, don't hesitate to contact us by email `TODO` or open a discussion 💛"""

SUBMISSION_TEXT_TASK = """1. Select a task you want to submit results for."""

SUBMISSION_TEXT_METADATA = """2. Fill in some metadata about your submission."""

SUBMISSION_TEXT_FILES = """3. Attach one or more files with your model's predictions.
    * If several files are attached, they will be treated as separate runs of the submitted model (e.g., with different seeds), and the metrics will be averaged across runs. For the baselines provided by the 🏟️ Long Code Arena Team, the results are averaged across 3 runs.
"""

SUBMISSION_TEXT_SUBMIT = """All set! A new PR to 🤗 [JetBrains-Research/lca-results](https://huggingface.co/datasets/JetBrains-Research/lca-results) should be opened when you press the "Submit" button. The 🏟️ Long Code Arena Team will review it shortly, and the results will appear on the leaderboard.

⏳ **Note:** It might take some time (up to 40 minutes) for the PR to get created, since it involves computing metrics for your submission."""
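
# NOTE: illustrative sketch only, not the Space's actual submission pipeline. It
# shows how a PR against 🤗 JetBrains-Research/lca-results could be opened with
# `huggingface_hub` (upload_file with create_pr=True, which is what the text above
# describes); the path layout inside the dataset repo and the commit message are
# assumptions, and a token with write access is required.
def _example_open_submission_pr(predictions_file: str, task_dir: str):
    import os

    from huggingface_hub import HfApi  # imported lazily; needs an authenticated token

    api = HfApi()
    return api.upload_file(
        path_or_fileobj=predictions_file,
        path_in_repo=f"{task_dir}/{os.path.basename(predictions_file)}",  # assumed layout
        repo_id="JetBrains-Research/lca-results",
        repo_type="dataset",
        create_pr=True,  # open a PR for review instead of committing directly
        commit_message="New 🏟️ Long Code Arena submission",
    )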