Update bitsandbytes, hivemind, transformers (#290)

- new bitsandbytes supports newer *and* older GPUs
- new hivemind supports a better bfloat16 codec

Co-authored-by: Alexander Borzunov <borzunov.alexander@gmail.com>
pull/298/head
justheuristic 1 year ago committed by GitHub
parent e0cef73757
commit 987f4d2b2f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -32,12 +32,12 @@ packages = find:
python_requires = >=3.7
install_requires =
torch>=1.12
bitsandbytes==0.34.0
bitsandbytes==0.37.1
accelerate==0.15.0
huggingface-hub==0.11.1
transformers==4.25.1
transformers>=4.25.1,<5.0.0
speedtest-cli==2.1.3
hivemind==1.1.5
hivemind==1.1.6
tensor_parallel==1.0.23
humanfriendly
async-timeout>=4.0.2

@ -8,10 +8,13 @@ from typing import Optional, Tuple
import torch.nn.quantized.dynamic.modules.linear
import transformers
from packaging import version
from transformers.models.bloom.modeling_bloom import BloomBlock, _expand_mask, _make_causal_mask, build_alibi_tensor
if not os.getenv("PETALS_IGNORE_DEPENDENCY_VERSION"):
assert transformers.__version__.startswith("4.25."), "Please install transformers 4.25.1"
assert (
version.parse("4.25.1") <= version.parse(transformers.__version__) < version.parse("5.0.0")
), "Please install a proper transformers version: pip install transformers>=4.25.1,<5.0.0"
class WrappedBloomBlock(BloomBlock):

Loading…
Cancel
Save