diff --git a/Dockerfile-13b-chat b/Dockerfile-13b-chat
index bea11affa75e36f4a46e26f5a494564e6f94579a..82ca45e47e7bcf086dd23375c00bce60166d1b42 100644
--- a/Dockerfile-13b-chat
+++ b/Dockerfile-13b-chat
@@ -27,7 +27,7 @@ RUN git clone https://gitlab.cs.pub.ro/netsys/llama /llama
 ADD llama-2-13b-chat/ /llama/llama-2-13b-chat/

 # avoid an annoying PyTorch (torch.distributed) warning
-ARG OMP_NUM_THREADS=1
+ENV OMP_NUM_THREADS=1

 # run llama example program
 CMD ["torchrun", \
diff --git a/Dockerfile-70b-chat b/Dockerfile-70b-chat
index 9d6f6a2c8e34366418fcf5ccd1867c574c78b52c..d485571bd6fe0ea19330137659a5cb2ee91b9e0e 100644
--- a/Dockerfile-70b-chat
+++ b/Dockerfile-70b-chat
@@ -27,7 +27,7 @@ RUN git clone https://gitlab.cs.pub.ro/netsys/llama /llama
 ADD llama-2-70b-chat/ /llama/llama-2-70b-chat/

 # avoid an annoying PyTorch (torch.distributed) warning
-ARG OMP_NUM_THREADS=1
+ENV OMP_NUM_THREADS=1

 # run llama example program
 CMD ["torchrun", \
diff --git a/Dockerfile-7b-chat b/Dockerfile-7b-chat
index 92fe7dba4f6a43864339529ab498c7eca48d9a6c..1885ad2d047b185098836e995188f2a2b9ea1dc2 100644
--- a/Dockerfile-7b-chat
+++ b/Dockerfile-7b-chat
@@ -27,7 +27,7 @@ RUN git clone https://gitlab.cs.pub.ro/netsys/llama /llama
 ADD llama-2-7b-chat/ /llama/llama-2-7b-chat/

 # avoid an annoying PyTorch (torch.distributed) warning
-ARG OMP_NUM_THREADS=1
+ENV OMP_NUM_THREADS=1

 # run llama example program
 CMD ["torchrun", \
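
The change above swaps ARG for ENV because an ARG only exists while the image is being built, so it never reaches the torchrun process started by CMD; an ENV is recorded in the image and is visible to processes at container runtime, which is what suppressing the torch.distributed warning requires. Below is a minimal standalone Dockerfile sketch, not part of this change, illustrating that distinction; the base image and the BUILD_ONLY_THREADS variable name are assumptions made purely for demonstration.

# Sketch only: shows that ARG values vanish after build while ENV values persist.
FROM python:3.10-slim

# Build-time only: visible to subsequent RUN instructions, absent at runtime.
ARG BUILD_ONLY_THREADS=1

# Runtime: stored in the image config and inherited by the CMD process.
ENV OMP_NUM_THREADS=1

# Prints OMP_NUM_THREADS=1; BUILD_ONLY_THREADS does not appear.
CMD ["sh", "-c", "env | grep -E 'OMP_NUM_THREADS|BUILD_ONLY_THREADS'"]

Building and running this image (e.g. docker build -t env-vs-arg . && docker run --rm env-vs-arg) prints only OMP_NUM_THREADS=1, which is why the original ARG form did not silence the PyTorch warning.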