mirror of
https://github.com/bigscience-workshop/petals
synced 2024-11-16 06:12:50 +00:00
add testing guide
This commit is contained in:
parent
b843328b65
commit
2bf83b42e5
18
README.md
18
README.md
@@ -79,4 +79,22 @@ MY_WRITE_TOKEN=TODO_WRITE_TOKEN_FROM_https://huggingface.co/settings/token
|
||||
python -m cli.convert_model --model bigscience/bloom-6b3 \
|
||||
--output_path ./converted_model --output_repo bigscience/test-bloomd-6b3 \
|
||||
--use_auth_token $MY_WRITE_TOKEN # ^-- todo replace output repo with something you have access to
|
||||
```
|
||||
|
||||
|
||||
### Test local vs remote model
|
||||
|
||||
To test distributed inference, run one or more servers, then open a new shell and run pytest with environment variables:
|
||||
```bash
|
||||
# shell A:
|
||||
python -m cli.run_server --prefix bloom6b3 --converted_model_name_or_path bigscience/test-bloomd-6b3 \
|
||||
--block_indices 3:5 --torch_dtype float32 --identity_path ./server1.id --host_maddrs /ip4/127.0.0.1/tcp/31337
|
||||
|
||||
# shell B:
|
||||
export PYTHONPATH=. INITIAL_PEERS="/ip4/TODO_COPY_INITIAL_PEERS_FROM_SERVER_OUTPUT"
|
||||
BLOCK_UID=bloom6b3.3 pytest tests/test_block_exact_match.py
|
||||
BLOCK_UID=bloom6b3.4 pytest tests/test_block_exact_match.py
|
||||
|
||||
# the test below will fail because server only has layers [3:5)
|
||||
# BLOCK_UID=bloom6b3.7 pytest tests/test_block_exact_match.py
|
||||
```
|
42
tests/test_block_exact_match.py
Normal file
42
tests/test_block_exact_match.py
Normal file
@@ -0,0 +1,42 @@
|
||||
import os
|
||||
|
||||
import hivemind
|
||||
import torch
|
||||
|
||||
from src.bloom.from_pretrained import load_pretrained_block
|
||||
from src.client.remote_block import RemoteTransformerBlock, get_remote_module
|
||||
|
||||
# --- Test configuration, taken from environment variables ---

# Space-separated multiaddrs of peers already running in the swarm,
# e.g. "/ip4/127.0.0.1/tcp/31337/p2p/Qm...".
INITIAL_PEERS = os.environ.get("INITIAL_PEERS")
if not INITIAL_PEERS:
    raise RuntimeError("Must specify INITIAL_PEERS environment variable with one or more peer ids")
INITIAL_PEERS = INITIAL_PEERS.split()


# Fully qualified DHT uid of the block under test, e.g. "bloom6b3.3"
# (format assumed to be "<prefix>.<index>" — see REF_INDEX below).
BLOCK_UID = os.environ.get("BLOCK_UID")
if not BLOCK_UID:
    raise RuntimeError("Must specify BLOCK_UID as an index of a transformer block to be tested")

# Reference checkpoint used to compute ground-truth outputs locally.
REF_NAME = os.environ.get("REF_NAME", "bigscience/test-bloomd-6b3")
# Block index inside the reference model. Defaults to the numeric suffix of
# BLOCK_UID. NOTE: the original code used BLOCK_UID[-1].split(".")[-1], which
# inspects only the last character and silently returns the wrong index for
# any block number >= 10; splitting the full uid fixes that.
REF_INDEX = int(os.environ.get("REF_INDEX", BLOCK_UID.split(".")[-1]))
|
||||
|
||||
|
||||
def test_remote_block_exact_match(atol_forward=1e-5, atol_inference=1e-3):
    """Verify that a remote transformer block matches its local reference copy.

    Runs the same random input through the remote block twice — once as a
    full-sequence forward pass and once token-by-token through an inference
    session — and compares both results against a locally loaded block.

    :param atol_forward: absolute tolerance for the full forward pass
    :param atol_inference: absolute tolerance for step-by-step inference
    """
    dht = hivemind.DHT(initial_peers=INITIAL_PEERS, client_mode=True, start=True)
    (remote_block,) = get_remote_module(dht, [BLOCK_UID])
    assert remote_block is not None, f"Could not find {BLOCK_UID} in DHT"
    assert isinstance(remote_block, RemoteTransformerBlock)

    inputs = torch.randn(1, 8, 4096)

    # Full-sequence forward pass through the remote block.
    (outputs_forward,) = remote_block(inputs)

    # Step-by-step inference over the same input, one token at a time.
    step_outputs = []
    with remote_block.begin_inference_session() as session:
        for position in range(inputs.shape[1]):
            step_outputs.append(session.step(inputs[:, position : position + 1, :]))
    outputs_inference = torch.cat(step_outputs, dim=1)

    # Ground truth: the corresponding block loaded locally in float32.
    ref_block = load_pretrained_block(REF_NAME, REF_INDEX, torch_dtype=torch.float32)
    (outputs_local,) = ref_block(inputs)

    assert torch.allclose(outputs_local, outputs_forward, rtol=0, atol=atol_forward)
    assert torch.allclose(outputs_local, outputs_inference, rtol=0, atol=atol_inference)
|
Loading…
Reference in New Issue
Block a user