Jack Monas committed
Commit 7721b7e · 1 Parent(s): f51addb
rules
app.py CHANGED
@@ -105,6 +105,20 @@ def main():
     )
     st.markdown("---")
 
+
+    st.markdown("### Datasets")
+    st.write(
+        "We offer two key datasets to support the 1X World Model Challenge:\n\n"
+        "**Raw Data:** The [world_model_raw_data](https://huggingface.co/datasets/1x-technologies/world_model_raw_data) dataset "
+        "provides raw sensor data, video logs, and annotated robot state sequences gathered from diverse real-world scenarios. "
+        "This dataset is split into 100 shards—each containing a 512x512 MP4 video, a segment index mapping, and state arrays—"
+        "and is licensed under CC-BY-NC-SA 4.0.\n\n"
+        "**Tokenized Data:** The [world_model_tokenized_data](https://huggingface.co/datasets/1x-technologies/world_model_tokenized_data) dataset "
+        "tokenizes the raw video sequences generated using the NVIDIA Cosmos Tokenizer. This compact representation of the raw data "
+        "is optimal for the compression challenge and is released under the Apache 2.0 license.\n\n"
+    )
+
+
     scoring_section()
 
 
@@ -112,3 +126,5 @@ def main():
 if __name__ == '__main__':
     main()
 
+
+
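For readers who want to try the datasets this new section advertises, below is a minimal sketch (not part of the commit) that pulls a subset of the raw dataset with `huggingface_hub.snapshot_download`. The `allow_patterns` globs are assumptions about the shard file names; the app text only says each shard holds a 512x512 MP4 video, a segment index mapping, and state arrays, so check the dataset card for the actual layout.

```python
# Hedged sketch: download part of the raw dataset and list the files.
# The allow_patterns below are guesses at the shard-0 file names, not
# names documented by this commit.
from pathlib import Path

from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="1x-technologies/world_model_raw_data",
    repo_type="dataset",
    allow_patterns=["*.json", "*0.mp4", "*0.bin"],  # assumed shard-0 files
)

# Show what actually came down so the real layout can be inspected.
for path in sorted(Path(local_dir).rglob("*")):
    if path.is_file():
        print(path.relative_to(local_dir))
```

The same call with `repo_id="1x-technologies/world_model_tokenized_data"` would fetch the Cosmos-tokenized variant mentioned in the diff, subject to the same caveat about file names.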