Fix dummy cache allocation (#574)

* Fix dummy cache allocation

* Try MPS device selection

* Rechain reloc
Artem Chumachenko committed 2 weeks ago via GitHub
parent d6f4f80f3f
commit 30f522d1a0

@@ -206,7 +206,7 @@ def measure_compute_rps(
     block = block.to(dtype)
     block = convert_block(block, 0, config, tensor_parallel_devices, device, quant_type=quant_type, freeze=True)
-    cache = (DUMMY_KEY_PAST.to(dtype), DUMMY_KEY_PAST.to(dtype))
+    cache = (DUMMY_KEY_PAST.to(dtype=dtype, device=device), DUMMY_KEY_PAST.to(dtype=dtype, device=device))
     elapsed = 0
     dummy_input = torch.randn(1, n_tokens, config.hidden_size, device=device, dtype=dtype)
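
For context, a minimal, self-contained sketch of what the two changes address, assuming only standard PyTorch APIs. The pick_device helper and the DUMMY_KEY_PAST placeholder shape below are hypothetical stand-ins, not the repository's actual definitions. The old call .to(dtype) converted only the dtype and left the tensor on its default (CPU) device, so the cache mismatched dummy_input, which is allocated on device; passing device= alongside dtype= keeps them co-located, whether that device is CUDA, Apple-silicon MPS, or CPU.

    import torch

    def pick_device() -> torch.device:
        # "Try MPS device selection": prefer CUDA, then MPS, then CPU.
        # (Hypothetical helper; the repository selects the device elsewhere.)
        if torch.cuda.is_available():
            return torch.device("cuda")
        if torch.backends.mps.is_available():
            return torch.device("mps")
        return torch.device("cpu")

    device = pick_device()
    dtype = torch.float16 if device.type != "cpu" else torch.float32

    # Hypothetical stand-in for the module-level DUMMY_KEY_PAST constant.
    DUMMY_KEY_PAST = torch.empty(1, 0, 64)

    # Broken variant: .to(dtype) converts the dtype but keeps the tensor
    # on CPU, so it would mismatch dummy_input below.
    #   cache = (DUMMY_KEY_PAST.to(dtype), DUMMY_KEY_PAST.to(dtype))

    # Fixed variant: convert dtype and move to the target device in one call.
    cache = (
        DUMMY_KEY_PAST.to(dtype=dtype, device=device),
        DUMMY_KEY_PAST.to(dtype=dtype, device=device),
    )

    dummy_input = torch.randn(1, 16, 1024, device=device, dtype=dtype)
    assert cache[0].device == dummy_input.device

The key point is that Tensor.to accepts dtype and device together, so a single call keeps the measurement cache on the same device as the converted block and the dummy input.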
