Commit 2c1aafb: Tweaks
pierre.delaunay committed Jun 6, 2024 (1 parent: bc08a16)
Showing 8 changed files with 65 additions and 10 deletions.
1 change: 0 additions & 1 deletion benchmarks/accelerate_opt/main.py
@@ -127,7 +127,6 @@ class CustomInitProcessGroupKwargs(InitProcessGroupKwargs):
rank=int(os.environ["RANK"]),
world_size=int(os.environ["WORLD_SIZE"]),
)
print(init_process_group_kwargs.backend)

# Accelerator sucks: it is impossible to make it use hccl
# We can bypass Accelerator logic by initializing the group ourselves
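The deleted print was leftover debug output; the comment above describes the real workaround. A minimal sketch of what "initializing the group ourselves" can look like, assuming torch.distributed, launcher-provided RANK/WORLD_SIZE variables, and the Habana-specific backend name "hccl" (the helper name is illustrative, not part of the commit):

import os
import torch.distributed as dist

def init_group_manually(backend="hccl"):
    # Create the default process group before Accelerator() is built;
    # Accelerate then reuses the existing group instead of selecting
    # its own (non-hccl) backend.
    if not dist.is_initialized():
        dist.init_process_group(
            backend=backend,
            rank=int(os.environ["RANK"]),
            world_size=int(os.environ["WORLD_SIZE"]),
        )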
13 changes: 7 additions & 6 deletions benchmarks/llama/main.py
@@ -59,10 +59,8 @@ def huggingface_main(args, model, config):
import transformers
from transformers import LlamaForCausalLM, LlamaTokenizerFast
from transformers.models.llama.configuration_llama import LlamaConfig
from voir.wrapper import DataloaderWrapper, Wrapper
from datasets import load_dataset
import optimum.habana


# Dataset here
println("Dataset")
dataset = load_dataset("wikitext", "wikitext-103-v1")
@@ -173,8 +171,11 @@ def main():


if __name__ == "__main__":
from voir.wrapper import StopProgram
import traceback
try:
main()
except Exception as err:
# Habana likes to eat exceptions
print(err)
except StopProgram:
print("Early stopped")
except Exception:
traceback.print_exc()
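Splitting the handler this way lets a voir-initiated early stop (StopProgram) be reported as a normal outcome, while genuine failures now emit a full traceback rather than the old bare print(err), which matters given that, as the removed comment noted, Habana tends to swallow exceptions.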
14 changes: 14 additions & 0 deletions benchmarks/stargan/prepare.py
@@ -0,0 +1,14 @@



def download_celebA():
from datasets import load_dataset
dataset = load_dataset(
"student/celebA",
revision="2d31e6555722815c74ea7c845b07c1063dd705e9",
cache_dir="/tmp/milabench/cuda/results/data"
)


if __name__ == "__main__":
download_celebA()
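Run as a script, this warms the dataset cache before the benchmark itself runs. Pinning revision makes the download reproducible, and the hard-coded cache_dir matches the datadir hard-coded in stargan/main.py below; both appear to stand in for the milabench-managed data directory until the commented-out config plumbing is restored.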
1 change: 1 addition & 0 deletions benchmarks/stargan/stargan/data_loader.py
@@ -91,6 +91,7 @@ def get_loader(

if dataset == "CelebA":
dataset = CelebA(image_dir, attr_path, selected_attrs, transform, mode)

elif dataset == "RaFD":
dataset = ImageFolder(image_dir, transform)

9 changes: 7 additions & 2 deletions benchmarks/stargan/stargan/main.py
@@ -197,8 +197,13 @@ def main(config):
parser.add_argument("--mode", type=str, default="train", choices=["train", "test"])
parser.add_argument("--use_tensorboard", type=str2bool, default=False)

mbconfig = json.loads(os.environ["MILABENCH_CONFIG"])
datadir = mbconfig["dirs"]["extra"]
# try:
# mbconfig = json.loads(os.environ["MILABENCH_CONFIG"])
# datadir = mbconfig["dirs"]["extra"]
# except:
# pass

datadir = "/tmp/milabench/cuda/results/data"

# Directories.
parser.add_argument("--celeba_image_dir", type=str, default="data/celeba/images")
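The env-driven lookup is parked in comments and replaced by a fixed path for now. A hedged sketch of what re-enabling it with a fallback could look like (narrowing the bare except to specific errors is our assumption, not part of the commit):

import json
import os

# Prefer the milabench-provided config; fall back to the fixed path
# used for local debugging runs.
datadir = "/tmp/milabench/cuda/results/data"
try:
    mbconfig = json.loads(os.environ["MILABENCH_CONFIG"])
    datadir = mbconfig["dirs"]["extra"]
except (KeyError, json.JSONDecodeError):
    pass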
32 changes: 32 additions & 0 deletions benchmate/benchmate/dataset.py
@@ -0,0 +1,32 @@




def no_transform(args):
return args


def transform_images(transform_x, transform_y=no_transform):
def _(args):
print(args)
return transform_x(args[0]), transform_y(args[1])
return _


def transform_celebA(transform_x):
def _(args):
print(args)
return transform_x(args["image"])
return _


class TransformedDataset:
def __init__(self, dataset, transforms=no_transform):
self.dataset = dataset
self.transforms = transforms

def __len__(self):
return len(self.dataset)

def __getitem__(self, item):
return self.transforms(self.dataset[item])
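A usage sketch for the new helpers, wrapping a HuggingFace-style dataset whose items are dicts with an "image" key; the torchvision transform pipeline and split are illustrative assumptions, not part of the commit:

from datasets import load_dataset
from torchvision import transforms

from benchmate.dataset import TransformedDataset, transform_celebA

to_tensor = transforms.Compose([
    transforms.Resize((512, 512)),
    transforms.ToTensor(),
])

raw = load_dataset("student/celebA", split="train")
# __getitem__ now yields transformed image tensors instead of raw dicts.
train_set = TransformedDataset(raw, transforms=transform_celebA(to_tensor))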
3 changes: 2 additions & 1 deletion config/base.yaml
@@ -496,7 +496,8 @@ stargan:
--image_size: 512
--c_dim: 5
--batch_size: 16
--dataset: "synth"
--dataset: "CelebA"
--celeba_image_dir: "{milabench_data}"

super-slomo:
inherits: _defaults
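Switching --dataset from "synth" to "CelebA" points the stargan benchmark at the real dataset prepared above; "{milabench_data}" is a placeholder that milabench substitutes with the run's data directory when the config is loaded.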
2 changes: 2 additions & 0 deletions scripts/article/run_cuda.sh
@@ -53,6 +53,8 @@ install_prepare() {
milabench prepare "$@"
}

module load cuda/12.3.2

if [ ! -d "$MILABENCH_WORDIR" ]; then
install_prepare
else
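Note that the module load sits outside install_prepare, so CUDA 12.3.2 is loaded on every invocation, not just on first setup. This assumes the host exposes CUDA through an environment-modules/Lmod module system, as the clusters targeted by these article scripts typically do.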
