pull/579/head
mike dupont 1 month ago
parent 661545f0ff
commit be69f3117f

@@ -33,6 +33,8 @@ ADD tests petals/tests
ADD LICENSE README.md pyproject.toml setup.cfg petals/
ADD src petals/src
RUN pip install --no-cache-dir -e petals
RUN pip freeze > pip.freeze.new
#RUN pip install --no-cache-dir --upgrade transformers==4.34.0
WORKDIR /home/petals/
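
The added RUN step snapshots the resolved environment to pip.freeze.new inside the image. A minimal sketch, assuming a committed baseline file named pip.freeze sits next to it (that baseline filename is an assumption, not part of this diff), of how the two snapshots could be compared to spot dependency drift after a rebuild:

import difflib

# Compare a hypothetical baseline freeze against the snapshot produced by the
# RUN step above; prints a unified diff of any changed package pins.
with open("pip.freeze") as old, open("pip.freeze.new") as new:
    for line in difflib.unified_diff(
        old.readlines(), new.readlines(), fromfile="pip.freeze", tofile="pip.freeze.new"
    ):
        print(line, end="")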

@@ -41,6 +41,26 @@ services:
    env_file: ./.env
#
  grok_local_gpu:
    profiles: ["local","gpu"]
    build: .
    depends_on:
      - backbone
    # xai-org/grok-1
    # hpcai-tech/grok-1
    # keyfan/grok-1-hf
    command: python -m petals.cli.run_server --port 31331 --num_blocks=1 keyfan/grok-1-hf --initial_peers $INITIAL_PEERS --device=$DEVICE
    ports:
      - "31331:31331"
    restart: always
    env_file: ./.env
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]
  tinymixtral_local_gpu:
    profiles: ["local","gpu"]
    build: .
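
The grok_local_gpu service above interpolates INITIAL_PEERS and DEVICE from the referenced .env file and reserves one NVIDIA GPU. A small preflight sketch, assuming those same variables are exported in the local shell (the check itself is illustrative, not part of this change), for confirming they are set before bringing the profile up:

import os

# Verify the variables that the run_server command above interpolates.
for var in ("INITIAL_PEERS", "DEVICE"):
    value = os.environ.get(var)
    if not value:
        raise SystemExit(f"{var} is not set; the grok_local_gpu command will fail without it")
    print(f"{var}={value}")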

@@ -40,6 +40,9 @@ class _AutoDistributedBase:
        ):
            kwargs["use_auth_token"] = True
        kwargs["trust_remote_code"] = True
        #trust_remote_code=True

        config = AutoConfig.from_pretrained(model_name_or_path, *args, **kwargs)
        if config.model_type not in _CLASS_MAPPING:
            raise ValueError(f"Petals does not support model type {config.model_type}")
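
With this change, every load that goes through the auto classes behaves as if the caller had passed trust_remote_code=True, which models shipping custom modeling code (such as keyfan/grok-1-hf from the compose file above) require. A hedged client-side sketch of the equivalent explicit call without this patch; the peer multiaddr is illustrative and assumes a swarm is already serving the model:

from petals import AutoDistributedModelForCausalLM

# Before this patch, the caller had to opt in to remote code explicitly.
model = AutoDistributedModelForCausalLM.from_pretrained(
    "keyfan/grok-1-hf",
    initial_peers=["/ip4/127.0.0.1/tcp/31337/p2p/EXAMPLE_PEER_ID"],  # illustrative peer
    trust_remote_code=True,
)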
