fix: make sure randomness is generated on cpu for consistency

pull/148/head
Bryce authored 1 year ago, committed by Bryce Drennan
parent 239b235140
commit da0f1e1ee6

@@ -125,7 +125,7 @@ vendorize_kdiffusion:
touch imaginairy/vendored/k_diffusion/config.py
# without this most of the k-diffusion samplers didn't work
sed -i '' -e 's#return (x - denoised) / utils.append_dims(sigma, x.ndim)#return (x - denoised) / sigma#g' imaginairy/vendored/k_diffusion/sampling.py
-sed -i '' -e 's#x = x + torch.randn_like(x) \* sigma_up#x = x + torch.randn_like(x, device="cpu").to(x.device) \* sigma_up#g' imaginairy/vendored/k_diffusion/sampling.py
+sed -i '' -e 's#torch.randn_like(x)#torch.randn_like(x, device="cpu").to(x.device)#g' imaginairy/vendored/k_diffusion/sampling.py
# https://github.com/AUTOMATIC1111/stable-diffusion-webui/issues/4558#issuecomment-1310387114
sed -i '' -e 's#t_fn = lambda sigma: sigma.log().neg()#t_fn = lambda sigma: sigma.to("cpu").log().neg().to(x.device)#g' imaginairy/vendored/k_diffusion/sampling.py
sed -i '' -e 's#return (x - denoised) / sigma#return ((x - denoised) / sigma.to("cpu")).to(x.device)#g' imaginairy/vendored/k_diffusion/sampling.py
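
For context (not part of the diff): a minimal sketch of why the seds route noise generation through the CPU. With a fixed seed, torch's CPU RNG yields the same values no matter where x lives, while drawing noise directly on a CUDA/MPS device uses a different RNG stream, so results would vary by hardware.

    # Illustrative sketch only; runs with or without a GPU present.
    import torch

    def noise_like(x: torch.Tensor) -> torch.Tensor:
        # Same pattern the sed rule injects: draw on the CPU, then move to x's device.
        return torch.randn_like(x, device="cpu").to(x.device)

    device = "cuda" if torch.cuda.is_available() else "cpu"

    torch.manual_seed(42)
    n_cpu = noise_like(torch.zeros(2, 3))                 # x on the CPU

    torch.manual_seed(42)
    n_dev = noise_like(torch.zeros(2, 3, device=device))  # x on the GPU, if any

    # Identical values either way, because only the CPU RNG was consulted.
    assert torch.equal(n_cpu, n_dev.cpu())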

@@ -64,7 +64,7 @@ def get_ancestral_step(sigma_from, sigma_to, eta=1.0):
def default_noise_sampler(x):
-return lambda sigma, sigma_next: torch.randn_like(x)
+return lambda sigma, sigma_next: torch.randn_like(x, device="cpu").to(x.device)
class BatchedBrownianTree:
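
The patched default_noise_sampler, restated as a complete function for readability (the + line above plus only an import and a comment):

    import torch

    def default_noise_sampler(x):
        # Draw the noise on the CPU and move it to x's device, so the values
        # depend only on the seeded CPU RNG, not on which device x lives on.
        return lambda sigma, sigma_next: torch.randn_like(x, device="cpu").to(x.device)
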
@@ -147,7 +147,7 @@ def sample_euler(
if s_tmin <= sigmas[i] <= s_tmax
else 0.0
)
-eps = torch.randn_like(x) * s_noise
+eps = torch.randn_like(x, device="cpu").to(x.device) * s_noise
sigma_hat = sigmas[i] * (gamma + 1)
if gamma > 0:
x = x + eps * (sigma_hat**2 - sigmas[i] ** 2) ** 0.5
@@ -229,7 +229,7 @@ def sample_heun(
if s_tmin <= sigmas[i] <= s_tmax
else 0.0
)
-eps = torch.randn_like(x) * s_noise
+eps = torch.randn_like(x, device="cpu").to(x.device) * s_noise
sigma_hat = sigmas[i] * (gamma + 1)
if gamma > 0:
x = x + eps * (sigma_hat**2 - sigmas[i] ** 2) ** 0.5
@@ -281,7 +281,7 @@ def sample_dpm_2(
if s_tmin <= sigmas[i] <= s_tmax
else 0.0
)
-eps = torch.randn_like(x) * s_noise
+eps = torch.randn_like(x, device="cpu").to(x.device) * s_noise
sigma_hat = sigmas[i] * (gamma + 1)
if gamma > 0:
x = x + eps * (sigma_hat**2 - sigmas[i] ** 2) ** 0.5
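
The three hunks above are the same one-line change in sample_euler, sample_heun, and sample_dpm_2. Roughly, the churn step they touch looks like this (a simplified sketch of the vendored code, not a verbatim copy):

    import torch

    def churn_step(x, sigma, gamma, s_noise=1.0):
        # When gamma > 0, extra noise is injected before the solver step.
        # Drawing it on the CPU keeps results identical across devices,
        # exactly as in the patched samplers.
        eps = torch.randn_like(x, device="cpu").to(x.device) * s_noise
        sigma_hat = sigma * (gamma + 1)
        if gamma > 0:
            x = x + eps * (sigma_hat**2 - sigma**2) ** 0.5
        return x, sigma_hat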

Six binary image files changed (contents not shown). Sizes before → after: 576 KiB → 560 KiB, 252 KiB → 245 KiB, 372 KiB → 432 KiB, 266 KiB → 253 KiB, 271 KiB → 268 KiB, 250 KiB → 240 KiB.

@@ -125,7 +125,7 @@ def test_img_to_img_from_url_cats(
img = pillow_fit_image_within(img)
img.save(f"{filename_base_for_orig_outputs}__orig.jpg")
img_path = f"{filename_base_for_outputs}.png"
-assert_image_similar_to_expectation(result.img, img_path=img_path, threshold=14000)
+assert_image_similar_to_expectation(result.img, img_path=img_path, threshold=17000)
@pytest.mark.parametrize("init_strength", [0, 0.05, 0.2, 1])
@@ -155,6 +155,7 @@ def test_img_to_img_fruit_2_gold(
threshold_lookup = {
"k_dpm_2_a": 26000,
"k_euler_a": 18000,
+"k_dpm_adaptive": 13000,
}
threshold = threshold_lookup.get(sampler_type, 11000)
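
The higher thresholds allow for the slightly different images the CPU-drawn noise produces. Purely for illustration, a hypothetical threshold-style comparison in the spirit of assert_image_similar_to_expectation; the project's actual helper, its distance metric, and the scale of the thresholds above may all differ:

    import numpy as np
    from PIL import Image

    def assert_image_close(img: Image.Image, expected_path: str, threshold: float):
        expected = Image.open(expected_path).convert("RGB")
        assert img.size == expected.size, "size mismatch"
        a = np.asarray(img.convert("RGB"), dtype=np.int64)
        b = np.asarray(expected, dtype=np.int64)
        # Illustrative metric only (mean absolute pixel difference); the real
        # tests compute their distance on a different scale, hence the much
        # larger threshold numbers above.
        distance = float(np.abs(a - b).mean())
        assert distance < threshold, f"images differ: {distance:.2f} >= {threshold}"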
