<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>DROID: A Large-Scale In-the-Wild Robot Manipulation Dataset</title>
<link href="https://fonts.googleapis.com/css?family=Google+Sans|Noto+Sans|Castoro" rel="stylesheet">
<link rel="stylesheet" href="./static/css/bulma.min.css">
<link rel="stylesheet" href="./static/css/bulma-carousel.min.css">
<link rel="stylesheet" href="./static/css/bulma-slider.min.css">
<link rel="stylesheet" href="./static/css/fontawesome.all.min.css">
<link rel="stylesheet" href="./static/css/academicons.min.css">
<link rel="stylesheet" href="https://cdn.jsdelivr.net/gh/jpswalsh/academicons@1/css/academicons.min.css">
<link rel="stylesheet" href="./static/css/index.css">
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.5.1/jquery.min.js"></script>
<script defer src="./static/js/fontawesome.all.min.js"></script>
<script src="./static/js/bulma-carousel.min.js"></script>
<script src="./static/js/bulma-slider.min.js"></script>
<script src="./static/js/index.js"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.8.0/styles/github.min.css">
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.8.0/highlight.min.js"></script>
<script>hljs.highlightAll();</script>
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css">
<style>
.author-block {
display: none;
}
.publication-authors:hover .author-block {
display: block;
}
</style>
</head>
<body>
<section class="hero">
<div class="hero-body">
<div class="container is-max-desktop">
<div class="columns is-centered">
<div class="column has-text-centered">
<h1 class="title is-1 publication-title">DROID: A Large-Scale In-the-Wild<br>Robot Manipulation Dataset</h1>
<div class="is-size-5 publication-authors">
<span class="team-name"><b>DROID Dataset Team <p style="font-size: 70%">(hover to display full author list)</p></b></span>
<span class="author-block">
Alexander Khazatsky<sup>*</sup>, Karl Pertsch<sup>*</sup>, Suraj Nair, Ashwin Balakrishna, Sudeep Dasari, Siddharth Karamcheti, Soroush Nasiriany, Mohan Kumar Srirama, Lawrence Yunliang Chen, Kirsty Ellis, Peter David Fagan, Joey Hejna, Masha Itkina, Marion Lepert, Yecheng Jason Ma, Patrick Tree Miller, Jimmy Wu, Suneel Belkhale, Shivin Dass, Huy Ha, Arhan Jain, Abraham Lee, Youngwoon Lee, Marius Memmel, Sungjae Park, Ilija Radosavovic, Kaiyuan Wang, Albert Zhan, Kevin Black, Cheng Chi, Kyle Beltran Hatch, Shan Lin, Jingpei Lu, Jean Mercat, Abdul Rehman, Pannag R Sanketi, Archit Sharma, Cody Simpson, Quan Vuong, Homer Rich Walke, Blake Wulfe, Ted Xiao, Jonathan Heewon Yang, Arefeh Yavary, Tony Z. Zhao, Christopher Agia, Rohan Baijal, Mateo Guaman Castro, Daphne Chen, Qiuyu Chen, Trinity Chung, Jaimyn Drake, Ethan Paul Foster, Jensen Gao, David Antonio Herrera, Minho Heo, Kyle Hsu, Jiaheng Hu, Donovon Jackson, Charlotte Le, Yunshuang Li, Kevin Lin, Roy Lin, Zehan Ma, Abhiram Maddukuri, Suvir Mirchandani, Daniel Morton, Tony Nguyen, Abigail O'Neill, Rosario Scalise, Derick Seale, Victor Son, Stephen Tian, Emi Tran, Andrew E. Wang, Yilin Wu, Annie Xie, Jingyun Yang, Patrick Yin, Yunchu Zhang, Osbert Bastani, Glen Berseth, Jeannette Bohg, Ken Goldberg, Abhinav Gupta, Abhishek Gupta, Dinesh Jayaraman, Joseph J Lim, Jitendra Malik, Roberto Martín-Martín, Subramanian Ramamoorthy, Dorsa Sadigh, Shuran Song, Jiajun Wu, Michael C. Yip, Yuke Zhu, Thomas Kollar, Sergey Levine, Chelsea Finn
<br>
<span class="is-size-6"><sup>*</sup>Co-Lead</span>
</span>
</div>
<div class="column has-text-centered">
<img src="figures/droid_logos.jpeg" class="interpolation-image" alt=""
style="display: block; width: 100%; margin-left: auto; margin-right: auto" />
</div>
<div class="column has-text-centered">
<div class="publication-links">
<span class="link-block">
<a target="_blank" href="https://arxiv.org/abs/2403.12945"
class="external-link button is-normal is-rounded is-dark">
<span class="icon">
<i class="fas fa-file-pdf"></i>
</span>
<span><b>PDF</b></span>
</a>
<a target="_blank" href="visualizer"
class="external-link button is-normal is-rounded is-dark">
<span class="icon">
<i class="fas fa-database"></i>
</span>
<span><b>Dataset Visualizer</b></span>
</a>
<a target="_blank" href="https://colab.research.google.com/drive/1b4PPH4XGht4Jve2xPKMCh-AXXAQziNQa?usp=sharing"
class="external-link button is-normal is-rounded is-dark">
<span class="icon">
<i class="fas fa-database"></i>
</span>
<span><b>Dataset Colab</b></span>
</a>
<a target="_blank" href="https://droid-dataset.github.io/droid/"
class="external-link button is-normal is-rounded is-dark">
<span class="icon">
<i class="fab fa-github"></i>
</span>
<span><b>Setup Guide</b></span>
</a>
<a target="_blank" href="https://github.com/droid-dataset/droid"
class="external-link button is-normal is-rounded is-dark">
<span class="icon">
<i class="fab fa-github"></i>
</span>
<span><b>Hardware Code</b></span>
</a>
<a target="_blank" href="https://github.com/droid-dataset/droid_policy_learning"
class="external-link button is-normal is-rounded is-dark">
<span class="icon">
<i class="fab fa-github"></i>
</span>
<span><b>Policy Learning Code</b></span>
</a>
</span>
</div>
</div>
</div>
</div>
</div>
</div>
</section>
<section class="section" style="padding: 0">
<div class="container is-max-desktop">
<div>
<video poster="" id="" autoplay controls muted loop width="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_teaser_animated.mp4" type="video/mp4">
</video>
</div>
</div>
</section>
<br>
<br>
<div class="container is-max-widescreen">
<h1><b>Getting Started</b></h1>
<div class="columns">
<div class="column has-text-centered">
<a href="visualizer">
<p style="font-size: 200%"><b>Interactive Dataset Visualizer <br> </b></p>
</a>
<video poster="" id="" autoplay controls muted loop height="50%" playbackRate=2.0 style="border-radius: 5px;">
<!-- <source src="videos/droid_scene_videos/8x8_video_width_240_fps_10_duration_20_shuffle.mp4" type="video/mp4"> -->
<source src="videos/dataset-visualizer.mp4" type="video/mp4">
</video>
</div>
<div class="column">
<p class="center-text" style="font-size: 200%"><b> Dataset Quickstart </b>
<a href="https://colab.research.google.com/drive/1b4PPH4XGht4Jve2xPKMCh-AXXAQziNQa?usp=sharing">
<b>(Colab) </b>
</a>
</p>
<pre><code>import tensorflow_datasets as tfds
ds = tfds.load("droid",
               data_dir="gs://gresearch/robotics", split="train")
for episode in ds.take(5):
    for step in episode["steps"]:
        image = step["observation"]["exterior_image_1_left"]
        wrist_image = step["observation"]["wrist_image_left"]
        action = step["action"]
        instruction = step["language_instruction"]</code></pre>
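<p style="font-size: 110%">
A minimal follow-up sketch (assuming eager TensorFlow execution and the field names from the loader above): convert one step to numpy and decode its language instruction before feeding it to a policy.
</p>
<pre><code># Sketch: turn a single step into plain numpy / Python objects.
for episode in ds.take(1):
    for step in episode["steps"]:
        image = step["observation"]["exterior_image_1_left"].numpy()   # uint8 image array
        action = step["action"].numpy()                                 # float action vector
        instruction = step["language_instruction"].numpy().decode("utf-8")
        break</code></pre>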
</div>
</div>
</div>
<br>
<br>
<section class="section">
<div class="container is-max-desktop">
<!-- Abstract. -->
<div class="columns is-centered has-text-centered">
<div class="column">
<h2 class="title is-3">Abstract</h2>
<div class="content has-text-justified">
<p style="font-size: 125%">
The creation of large, diverse, high-quality robot manipulation datasets is an important stepping stone on the path toward more capable and robust robotic manipulation policies. However, creating such datasets is challenging: collecting robot manipulation data in diverse environments poses logistical and safety challenges and requires substantial investments in hardware and human labour. As a result, even the most general robot manipulation policies today are mostly trained on data collected in a small number of environments with limited scene and task diversity. In this work, we introduce DROID (<b>D</b>istributed <b>Ro</b>bot <b>I</b>nteraction <b>D</b>ataset), a diverse robot manipulation dataset with 76k demonstration trajectories or 350h of interaction data, collected across 564 scenes and 86 tasks by 50 data collectors in North America, Asia, and Europe over the course of 12 months. We demonstrate that training with DROID leads to policies with higher performance, greater robustness, and improved generalization ability. We open source the full dataset, code for policy training, and a detailed guide for reproducing our robot hardware setup.
</p>
</div>
</div>
</div>
</div>
</section>
<section class="section">
<div class="container is-max-widescreen">
<div class="rows">
<div class="rows is-centered ">
<div class="row is-full-width">
<h2 class="title is-2"><span class="dvima">The DROID Robot Platform</span></h2>
</div>
<br>
<div class="columns">
<div class="column has-text-centered">
<img src="figures/droid_setup.png" class="interpolation-image" alt=""
style="display: block; width: 80%; margin-left: auto; margin-right: auto" />
</div>
<!-- <br> -->
<div class="column has-text-centered">
<span style="font-size: 140%">
<br>
DROID uses the same hardware setup across all 13 institutions to streamline
data collection while maximizing portability and flexibility. The setup
consists of a Franka Panda 7-DoF robot arm, two adjustable Zed 2 stereo
cameras, a wrist-mounted Zed Mini stereo camera, and an Oculus Quest 2
headset with controllers for teleoperation. Everything is mounted on a
portable, height-adjustable desk for quick scene changes.
</span>
</div>
</div>
</div>
</div>
</div>
</section>
<section class="section">
<div class="container is-max-widescreen">
<div class="rows">
<div class="rows is-centered ">
<div class="row is-full-width">
<h2 class="title is-2"><span class="dvima">DROID Dataset Analysis</span></h2>
</div>
<br>
<div class="columns">
<div class="column has-text-centered">
<img src="figures/droid_viewpoint_distribution.png" class="interpolation-image" alt=""
style="display: block; width: 100%; margin-left: auto; margin-right: auto" />
<br>
<span style="font-size: 125%">
<span style="font-weight: bold">Third-person camera viewpoints in DROID (subsampled).</span>
DROID episodes cover a total of 1417 camera viewpoints along
with intrinsic and extrinsic stereo camera calibration. Brighter colors
indicate regions of higher viewpoint density.
</span>
</div>
<div class="column has-text-centered">
<img src="figures/droid_interaction_points.png" class="interpolation-image" alt=""
style="display: block; width: 100%; margin-left: auto; margin-right: auto" />
<span style="font-size: 125%">
<span style="font-weight: bold">Visualization of 3D interaction points relative to the robot
base.</span>
We visualize the 3D location at which the gripper first closes in
each trajectory, since closing the gripper often indicates meaningful
object interactions. DROID’s interactions cover a larger part of the
robot’s workspace, since the robot is moved freely between collection
sessions instead of being placed in front of repetitive tabletop scenes.
</span>
</div>
</div>
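<br>
<span style="font-size: 125%">
A hedged sketch of how such interaction points can be extracted from the released episodes, reusing the <code>ds</code> iterator from the quickstart above (the observation keys <code>gripper_position</code> and <code>cartesian_position</code> are assumptions; consult the dataset schema for the exact names):
</span>
<pre><code>import numpy as np

def first_gripper_close_point(episode, close_threshold=0.5):
    # Return the xyz position at which the gripper first closes, or None.
    # Assumed keys: gripper_position in [0, 1] (1 = closed),
    # cartesian_position = (x, y, z, roll, pitch, yaw) in the robot base frame.
    for step in episode["steps"]:
        gripper = np.asarray(step["observation"]["gripper_position"]).reshape(-1)[0]
        if gripper > close_threshold:
            return np.asarray(step["observation"]["cartesian_position"])[:3]
    return None

points = []
for episode in ds.take(100):
    point = first_gripper_close_point(episode)
    if point is not None:
        points.append(point)</code></pre>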
<!-- <img src="figures/droid_viewpoint_distribution.png" class="interpolation-image" alt=""
style="display: block; width: 60%; margin-left: auto; margin-right: auto" />
<br>
<span style="font-size: 125%">
<span style="font-weight: bold">Third-person camera viewpoints in DROID (subsampled).</span>
DROID episodes cover a total of 1417 camera viewpoints along
with intrinsic and extrinsic stereo camera calibration. Brighter colors
indicate regions of higher viewpoint density.
</span>
<br>
<br>
<img src="figures/droid_interaction_points.png" class="interpolation-image" alt=""
style="display: block; width: 60%; margin-left: auto; margin-right: auto" />
<br>
<span style="font-size: 125%">
<span style="font-weight: bold">Visualization of 3D interaction points relative to the robot
base.</span>
We visualize the 3D location at which the gripper first closes in
each trajectory, since closing the gripper often indicates meaningful
object interactions. DROID’s interactions cover a larger part of the
robot’s workspace, since the robot is moved freely between collection
sessions instead of being placed in front of repetitive tabletop scenes.
</span>
<br>
<br> -->
<img src="figures/scene_distribution.png" class="interpolation-image" alt=""
style="display: block; width: 50%; margin-left: auto; margin-right: auto" />
<br>
<span style="font-size: 125%">
<span style="font-weight: bold">Number of scenes per scene type.</span> DROID has an order of
magnitude more scenes than other large robot manipulation datasets,
spanning a much wider range of scene types.
</span>
<br>
<!-- <div class="container">
<div id="results-carousel" class="carousel results-carousel">
<div class="item">
<video poster="" id="" autoplay controls muted loop height="200%" playbackRate=2.0>
<source src="videos/droid_scene_videos/scene_type_bathroom_video_width_240_fps_10_duration_20.mp4" type="video/mp4">
</video>
</div>
<div class="item">
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0>
<source src="videos/droid_scene_videos/scene_type_bedroom_video_width_240_fps_10_duration_20.mp4" type="video/mp4">
</video>
</div>
<div class="item">
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0>
<source src="videos/droid_scene_videos/scene_type_closet_video_width_240_fps_10_duration_20.mp4" type="video/mp4">
</video>
</div>
<div class="item">
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0>
<source src="videos/droid_scene_videos/scene_type_dining_room_video_width_240_fps_10_duration_20.mp4" type="video/mp4">
</video>
</div>
<div class="item">
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0>
<source src="videos/droid_scene_videos/scene_type_industrial_office_video_width_240_fps_10_duration_20.mp4" type="video/mp4">
</video>
</div>
<div class="item">
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0>
<source src="videos/droid_scene_videos/scene_type_kitchen_video_width_240_fps_10_duration_20.mp4" type="video/mp4">
</video>
</div>
<div class="item">
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0>
<source src="videos/droid_scene_videos/scene_type_laboratory_video_width_240_fps_10_duration_20.mp4" type="video/mp4">
</video>
</div>
<div class="item">
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0>
<source src="videos/droid_scene_videos/scene_type_laundry_video_width_240_fps_10_duration_20.mp4" type="video/mp4">
</video>
</div>
<div class="item">
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0>
<source src="videos/droid_scene_videos/scene_type_living_room_video_width_240_fps_10_duration_20.mp4" type="video/mp4">
</video>
</div>
</div> -->
</div>
<br>
<img src="figures/droid_verb_objects_highres.png" class="interpolation-image" alt=""
style="display: block; width: 100%; margin-left: auto; margin-right: auto" />
<br>
<span style="font-size: 125%">
<span style="font-weight: bold">Distribution of verbs and objects in DROID</span>
<b>Top:</b> Distribution of skills in DROID.
DROID features a long tail of diverse verb classes that is only matched by Bridge V2, while the RH20T and RT-1
datasets have a more constrained set of skills.
<b>Bottom</b>: Distribution of interacted objects in DROID, grouped by category. The robot interacts with a wide range of everyday objects.
</span>
<!-- <br>
<br>
<img src="figures/object_distribution.png" class="interpolation-image" alt=""
style="display: block; width: 90%; margin-left: auto; margin-right: auto" />
<br>
<span style="font-size: 125%">
<span style="font-weight: bold">Distribution of interacted objects in DROID,</span> grouped by category. The robot interacts with a wide range of everyday objects.
</span>
<br>
<br>
<img src="figures/verb_distributions.png" class="interpolation-image" alt=""
style="display: block; width: 90%; margin-left: auto; margin-right: auto" />
<br>
<span style="font-size: 125%">
<span style="font-weight: bold">Distribution of skills in DROID.</span>
DROID features a long tail of diverse verb classes that is only matched by Bridge V2, while the RH20T and RT-1
datasets have a more constrained set of skills.
</span> -->
</div>
</div>
</div>
</section>
<section class="section">
<div class="container is-max-widescreen">
<div class="rows">
<div class="rows is-centered ">
<div class="row is-full-width">
<h2 class="title is-2"><span class="dvima">Experiments</span></h2>
<p style="font-size: 125%">
We investigate whether DROID
can be used to boost policy performance and robustness across
a wide spectrum of robot manipulation tasks and environments.
To this end, we train policies across 6 tasks in 4 different
locations including lab, office, and household settings, to reflect
the diversity of real world robotic research use cases.
All experiments use representative, state of the art robot policy
learning approaches. Across the board, we find that DROID
improves policy success rate while increasing robustness to
scene changes like distractors or novel object instances.
</p>
</div>
<br>
<br>
<div class="row is-full-width">
<h2 class="title is-3"><span class="dvima">Qualitative Comparison</span></h2>
<p style="font-size: 125%">
Qualitatively, we find that policies that leverage DROID during training are
notably smoother and more precise than the comparison policies.
</p>
</div>
<br>
<div class="columns">
<div class="column has-text-centered">
<p style="font-size: 150%"><b>DROID (Ours)</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/cooking_droid_speed.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>Open-X</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/cooking_oxe_speed.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>No Co-Train</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/cooking_narrow_speed.mp4" type="video/mp4">
</video>
</div>
</div>
<div class="columns">
<div class="column has-text-centered">
<p style="font-size: 150%"><b>DROID (Ours)</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/clean_droid.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>Open-X</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/clean_oxe.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>No Co-Train</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/clean_narrow.mp4" type="video/mp4">
</video>
</div>
</div>
<div class="columns">
<div class="column has-text-centered">
<p style="font-size: 150%"><b>DROID (Ours)</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/apple_droid.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>Open-X</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/apple_oxe.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>No Co-Train</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/apple_narrow.mp4" type="video/mp4">
</video>
</div>
</div>
<div class="columns">
<div class="column has-text-centered">
<p style="font-size: 150%"><b>DROID (Ours)</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/chips_droid.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>Open-X</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/chips_oxe.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>No Co-Train</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/chips_narrow.mp4" type="video/mp4">
</video>
</div>
</div>
<br>
<div class="row is-full-width">
<h2 class="title is-3"><span class="dvima">Qualitative Comparison (OOD Evaluations)</span></h2>
<p style="font-size: 125%">
We also find that policies co-trained with DROID are more robust to distractors and novel object instances.
</p>
</div>
<br>
<div class="columns">
<div class="column has-text-centered">
<p style="font-size: 150%"><b>DROID (Ours)</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/clean_droid_ood.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>Open-X</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/clean_oxe_ood.mp4" type="video/mp4">
<!-- <source src="videos/oxe_iid_0.mp4" type="video/mp4"> -->
<!-- <source src="videos/droid_eval_videos/clean_narrow_ood.mp4" type="video/mp4"> -->
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>No Co-Train</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/clean_narrow_ood.mp4" type="video/mp4">
</video>
</div>
</div>
<div class="columns">
<div class="column has-text-centered">
<p style="font-size: 150%"><b>DROID (Ours)</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/apple_ood_droid.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>Open-X</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/apple_ood_oxe.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>No Co-Train</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/apple_ood_narrow.mp4" type="video/mp4">
</video>
</div>
</div>
<div class="columns">
<div class="column has-text-centered">
<p style="font-size: 150%"><b>DROID (Ours)</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/chips_ood_droid.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>Open-X</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/chips_ood_oxe.mp4" type="video/mp4">
</video>
</div>
<div class="column has-text-centered">
<p style="font-size: 150%"><b>No Co-Train</b></p>
<video poster="" id="" autoplay controls muted loop height="100%" playbackRate=2.0 style="border-radius: 5px;">
<source src="videos/droid_eval_videos/chips_ood_narrow.mp4" type="video/mp4">
</video>
</div>
</div>
<br>
<div class="row is-full-width">
<h2 class="title is-3"><span class="dvima">Quantitative Comparison</span></h2>
<p style="font-size: 125%">
</p>
</div>
<br>
<img src="figures/droid_eval_setups.png" class="interpolation-image" alt=""
style="display: block; width: 100%; margin-left: auto; margin-right: auto" />
<br>
<span style="font-size: 125%">
<span style="font-weight: bold"> Robot setups for policy evaluation.</span>
We cover a wide range of tasks and scenes, from lab evaluations to offices and real households, to
reflect the diversity of use cases in real robot research.
</span>
<br>
<br>
<img src="figures/cotrain.png" class="interpolation-image" alt=""
style="display: block; width: 100%; margin-left: auto; margin-right: auto" />
<br>
<span style="font-size: 125%">
<span style="font-weight: bold"> Does DROID Improve Policy Performance and Robustness?</span>
We find that across all our evaluation tasks, co-training with DROID significantly
improves both in-distribution and OOD performance over both no co-training and co-training with the Open-X dataset. We
compare success rates averaged across all tasks with standard error, and find that DROID outperforms the next best method by <b>22%</b> absolute
success rate in-distribution and by <b>17%</b> out of distribution.
</span>
<br>
</div>
</div>
</div>
</section>
<section class="section" id="BibTeX">
<div class="container is-max-widescreen content">
<h2 class="title">BibTeX</h2>
<pre><code>@article{khazatsky2024droid,
title = {DROID: A Large-Scale In-The-Wild Robot Manipulation Dataset},
author = {Alexander Khazatsky and Karl Pertsch and Suraj Nair and Ashwin Balakrishna and Sudeep Dasari and Siddharth Karamcheti and Soroush Nasiriany and Mohan Kumar Srirama and Lawrence Yunliang Chen and Kirsty Ellis and Peter David Fagan and Joey Hejna and Masha Itkina and Marion Lepert and Yecheng Jason Ma and Patrick Tree Miller and Jimmy Wu and Suneel Belkhale and Shivin Dass and Huy Ha and Arhan Jain and Abraham Lee and Youngwoon Lee and Marius Memmel and Sungjae Park and Ilija Radosavovic and Kaiyuan Wang and Albert Zhan and Kevin Black and Cheng Chi and Kyle Beltran Hatch and Shan Lin and Jingpei Lu and Jean Mercat and Abdul Rehman and Pannag R Sanketi and Archit Sharma and Cody Simpson and Quan Vuong and Homer Rich Walke and Blake Wulfe and Ted Xiao and Jonathan Heewon Yang and Arefeh Yavary and Tony Z. Zhao and Christopher Agia and Rohan Baijal and Mateo Guaman Castro and Daphne Chen and Qiuyu Chen and Trinity Chung and Jaimyn Drake and Ethan Paul Foster and Jensen Gao and David Antonio Herrera and Minho Heo and Kyle Hsu and Jiaheng Hu and Donovon Jackson and Charlotte Le and Yunshuang Li and Kevin Lin and Roy Lin and Zehan Ma and Abhiram Maddukuri and Suvir Mirchandani and Daniel Morton and Tony Nguyen and Abigail O'Neill and Rosario Scalise and Derick Seale and Victor Son and Stephen Tian and Emi Tran and Andrew E. Wang and Yilin Wu and Annie Xie and Jingyun Yang and Patrick Yin and Yunchu Zhang and Osbert Bastani and Glen Berseth and Jeannette Bohg and Ken Goldberg and Abhinav Gupta and Abhishek Gupta and Dinesh Jayaraman and Joseph J Lim and Jitendra Malik and Roberto Martín-Martín and Subramanian Ramamoorthy and Dorsa Sadigh and Shuran Song and Jiajun Wu and Michael C. Yip and Yuke Zhu and Thomas Kollar and Sergey Levine and Chelsea Finn},
year = {2024},
}
</code></pre>
</div>
</section>
<footer class="footer">
<div class="container">
<div class="columns is-centered">
<div class="column">
<div class="content has-text-centered">
<p>
Website template borrowed from <a
href="https://github.com/nerfies/nerfies.github.io">NeRFies</a>
and <a href="https://eureka-research.github.io/">Eureka</a>
</p>
</div>
</div>
</div>
</div>
</footer>
</body>
<script>
document.addEventListener('DOMContentLoaded', (event) => {
document.querySelectorAll('.team-name').forEach(item => {
item.addEventListener('click', (event) => {
let authorBlock = item.nextElementSibling;
if (authorBlock.style.display === 'block') {
authorBlock.style.display = 'none';
} else {
authorBlock.style.display = 'block';
}
});
});
});
let timeoutIds = [];
function populateDemo(imgs, num) {
// Get the expanded image
var expandImg = document.getElementById("expandedImg-" + num);
// Get the image text
var imgText = document.getElementById("imgtext-" + num);
var answer = document.getElementById("answer-" + num);
// Use the same src in the expanded image as the image being clicked on from the grid
expandImg.src = imgs.src.replace(".png", ".mp4");
var video = document.getElementById('demo-video-' + num);
// or video = $('.video-selector')[0];
video.pause();
video.load();
video.play();
video.removeAttribute('controls');
console.log(expandImg.src);
// Use the value of the alt attribute of the clickable image as text inside the expanded image
var qa = imgs.alt.split("[sep]");
imgText.innerHTML = qa[0];
answer.innerHTML = "";
// Show the container element (hidden with CSS)
expandImg.parentElement.style.display = "block";
for (const timeoutId of timeoutIds) {
clearTimeout(timeoutId);
}
// NOTE (wliang): Modified from original to read from file instead
fetch(qa[1])
.then(response => response.text())
.then(contents => {
// Call the processData function and pass the contents as an argument
typeWriter(contents, 0, qa[0], num);
})
.catch(error => console.error('Error reading file:', error));
}
function typeWriter(txt, i, q, num) {
var imgText = document.getElementById("imgtext-" + num);
var answer = document.getElementById("answer-" + num);
if (imgText.innerHTML == q) {
for (let k = 0; k < 5; k++) {
if (i < txt.length) {
if (txt.charAt(i) == "\\") {
answer.innerHTML += "\n";
i += 1;
} else {
answer.innerHTML += txt.charAt(i);
}
i++;
}
}
hljs.highlightAll();
timeoutIds.push(setTimeout(typeWriter, 1, txt, i, q, num));
}
}
</script>
</html>