Add LLAMA to the config
pierre.delaunay committed Nov 7, 2023
1 parent c1850f9 · commit 2848b5c
Showing 4 changed files with 44 additions and 3 deletions.
benchmarks/llama/benchfile.py (21 additions, 2 deletions)
@@ -1,9 +1,9 @@
 import uuid

-from milabench.executors import TorchRunExecutor
+from milabench.executors import CmdExecutor
 from milabench.pack import Package


 class LLAMA(Package):
     base_requirements = "requirements.in"
     main_script = "main.py"
@@ -14,5 +14,24 @@ def make_env(self):
             "OMP_NUM_THREADS": str(self.config.get("cpus_per_gpu", 8))
         }

+    def build_prepare_plan(self):
+        return CmdExecutor(
+            self,
+            str(self.dirs.code / "main.py"),
+            *self.argv,
+            "--prepare",
+            "--cache",
+            str(self.dirs.cache)
+        )
+
+    def build_run_plan(self):
+        return CmdExecutor(
+            self,
+            str(self.dirs.code / "main.py"),
+            *self.argv,
+            "--cache",
+            str(self.dirs.cache),
+            use_stdout=True
+        )

 __pack__ = LLAMA
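
For orientation, here is a minimal sketch of what the two plans above amount to, assuming CmdExecutor simply runs its positional arguments as one command line; code_dir, cache_dir, and argv below are hypothetical placeholders standing in for self.dirs.code, self.dirs.cache, and self.argv, not values taken from this commit.

# Sketch only (not milabench code): approximate the argument lists built by
# build_prepare_plan and build_run_plan above.
from pathlib import Path

code_dir = Path("benchmarks/llama")             # placeholder for self.dirs.code
cache_dir = Path("/tmp/milabench/cache/llama")  # placeholder for self.dirs.cache
argv = ["--number", "10", "--repeat", "90"]     # placeholder for self.argv

prepare_cmd = [str(code_dir / "main.py"), *argv, "--prepare", "--cache", str(cache_dir)]
run_cmd = [str(code_dir / "main.py"), *argv, "--cache", str(cache_dir)]

print(" ".join(prepare_cmd))  # e.g. benchmarks/llama/main.py --number 10 --repeat 90 --prepare --cache ...
print(" ".join(run_cmd))      # same command without --prepare; use_stdout=True captures its output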
config/base.yaml (18 additions, 0 deletions)
@@ -39,6 +39,24 @@ _flops:
     --number: 10
     --repeat: 90

+llama:
+  inherits: _defaults
+  definition: ../benchmarks/llama
+  group: llm
+  install_group: torch
+
+  plan:
+    method: per_gpu
+
+  tags:
+    - nlp
+    - llm
+
+  argv:
+    --number: 10
+    --repeat: 90
+
+
 _hf:
   inherits: _defaults
   definition: ../benchmarks/huggingface
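
The argv mapping in the new llama entry is what ends up in self.argv inside benchfile.py. As a rough sketch, assuming milabench flattens each flag/value pair into consecutive command-line tokens (the helper below is illustrative, not milabench's actual loader code):

# Illustrative only: flatten the argv mapping from the llama entry above into
# the flag/value tokens consumed by build_prepare_plan/build_run_plan.
argv_config = {"--number": 10, "--repeat": 90}

flat_argv = [token for flag, value in argv_config.items() for token in (flag, str(value))]
print(flat_argv)  # ['--number', '10', '--repeat', '90']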
config/standard.yaml (4 additions, 0 deletions)
@@ -5,6 +5,10 @@ include:
 # Enabled tests #
 #################

+llama:
+  enabled: true
+  weight: 1.0
+
 resnet50:
   enabled: true
   weight: 1.0
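
Note that llama is defined in base.yaml (above) and only switched on here: standard.yaml starts with an include section, presumably pulling in base.yaml, and layers enabled/weight on top of it. A rough sketch of that layering as a plain dict merge (the merge rule is an assumption, not milabench's actual config loader):

# Illustrative only: approximate how the standard.yaml entry layers on top of
# the base.yaml definition of llama. Real YAML loading/merging is not shown.
base = {
    "llama": {"inherits": "_defaults", "definition": "../benchmarks/llama", "group": "llm"},
}
standard = {
    "llama": {"enabled": True, "weight": 1.0},
}

merged = {name: {**base.get(name, {}), **standard.get(name, {})} for name in {**base, **standard}}
print(merged["llama"])  # definition from base.yaml plus enabled/weight from standard.yaml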
milabench/schedule.py (1 addition, 1 deletion)
@@ -135,7 +135,7 @@ def launch_milabench(args, sbatch_args=None, dry: bool = False, sync: bool = Fal
     if sbatch_args is None:
         sbatch_args = [
             "--ntasks=1",
-            "--gpus-per-task=4g.40gb:1",
+            "--gpus-per-task=rtx8000:1",
             "--cpus-per-task=4",
             "--time=01:30:00",
             "--ntasks-per-node=1",
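
The swapped value uses Slurm's --gpus-per-task=<type>:<count> form: the job now requests one full RTX 8000 per task instead of a 4g.40gb MIG slice. A sketch of the resulting defaults (the list is truncated in the diff above, so any trailing entries are omitted here):

# Sketch only: the default sbatch arguments as far as the diff shows them.
sbatch_args = [
    "--ntasks=1",
    "--gpus-per-task=rtx8000:1",  # one full RTX 8000 per task (previously a 4g.40gb MIG slice)
    "--cpus-per-task=4",
    "--time=01:30:00",
    "--ntasks-per-node=1",
]
print("sbatch " + " ".join(sbatch_args))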
