[FIX] mock infer in diffusers pipeline inferencer ut. (open-mmlab#2026)
Mock the model's infer method instead of the diffusers-internal _encode_prompt, so the unit test does not run the real pipeline.
liuwenran committed Sep 11, 2023
1 parent: c4fe495 · commit: bde84a0
Showing 1 changed file with 4 additions and 9 deletions.
@@ -26,15 +26,10 @@ def test_diffusers_pipeline_inferencer():
 
     inferencer_instance = DiffusersPipelineInferencer(cfg, None)
 
-    def mock_encode_prompt(prompt, do_classifier_free_guidance,
-                           num_images_per_prompt, *args, **kwargs):
-        batch_size = len(prompt) if isinstance(prompt, list) else 1
-        batch_size *= num_images_per_prompt
-        if do_classifier_free_guidance:
-            batch_size *= 2
-        return torch.randn(batch_size, 5, 16)  # 2 for cfg
-
-    inferencer_instance.model._encode_prompt = mock_encode_prompt
+    def mock_infer(*args, **kwargs):
+        return dict(samples=torch.randn(1, 3, 64, 64))
+
+    inferencer_instance.model.infer = mock_infer
 
     text_prompts = 'Japanese anime style, girl'
     negative_prompt = 'bad face, bad hands'
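
The point of the change is to stub out the model at its public boundary (infer) rather than at a diffusers-internal helper (_encode_prompt), so the test no longer depends on diffusers' private API staying stable. Below is a minimal, self-contained sketch of the same pattern; the FakePipelineModel and FakeInferencer classes and the call signature are illustrative assumptions, while mock_infer, the samples key, the tensor shape, and the prompts come from the diff itself.

import torch


class FakePipelineModel:
    """Stand-in for the wrapped diffusers pipeline (illustrative assumption)."""

    def infer(self, *args, **kwargs):
        raise RuntimeError('real inference should never run in a unit test')


class FakeInferencer:
    """Stand-in for DiffusersPipelineInferencer (illustrative assumption)."""

    def __init__(self):
        self.model = FakePipelineModel()

    def __call__(self, text, negative_prompt=None):
        # The real inferencer forwards prompts to the model and
        # post-processes the returned samples; pass-through suffices here.
        return self.model.infer(text, negative_prompt=negative_prompt)


def test_mocked_infer():
    inferencer_instance = FakeInferencer()

    # Same pattern as the commit: overwrite the attribute on the instance
    # so the heavy model call becomes a cheap, fixed-shape stub.
    def mock_infer(*args, **kwargs):
        return dict(samples=torch.randn(1, 3, 64, 64))

    inferencer_instance.model.infer = mock_infer

    result = inferencer_instance(
        'Japanese anime style, girl',
        negative_prompt='bad face, bad hands')
    assert result['samples'].shape == (1, 3, 64, 64)


test_mocked_infer()

Assigning to inferencer_instance.model.infer shadows the class method on that one instance only, so no teardown is needed and other tests see the real method.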
