Commit d63db6f

remove image

1 parent f2c025c commit d63db6f

File tree

1 file changed: +0 −9 lines changed

models/turbine_models/custom_models/sd_inference/clip_runner.py

Lines changed: 0 additions & 9 deletions
@@ -3,7 +3,6 @@
 from transformers import CLIPTokenizer
 from iree import runtime as ireert
 import torch
-from PIL import Image
 
 parser = argparse.ArgumentParser()
 
@@ -70,12 +69,9 @@ def run_clip(
             from transformers import CLIPProcessor
             import requests
 
-            url = "http://images.cocodataset.org/val2017/000000039769.jpg"
-            image = Image.open(requests.get(url, stream=True).raw)
             tokenizer = CLIPProcessor.from_pretrained("openai/clip-vit-large-patch14")
             text_input = tokenizer(
                 text=prompt,
-                images=image,
                 truncation=True,
                 padding=True,
                 return_tensors="pt",
@@ -122,10 +118,6 @@ def run_torch_clip(hf_model_name, hf_auth_token, prompt):
     else:
         if hf_model_name == "openai/clip-vit-large-patch14":
             from transformers import CLIPProcessor
-            import requests
-
-            url = "http://images.cocodataset.org/val2017/000000039769.jpg"
-            image = Image.open(requests.get(url, stream=True).raw)
 
             tokenizer = CLIPProcessor.from_pretrained("openai/clip-vit-large-patch14")
             hf_subfolder = ""  # CLIPProcessor does not have a subfolder
@@ -138,7 +130,6 @@ def run_torch_clip(hf_model_name, hf_auth_token, prompt):
     )
     text_input = tokenizer(
         text=prompt,
-        images=image,
         truncation=True,
         padding=True,
         return_tensors="pt",
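
For reference, after this commit the CLIP runner tokenizes text only. A minimal sketch of the resulting text-only path, assuming the transformers package is installed; the prompt string here is illustrative, not from the source:

# Minimal sketch of the text-only tokenization left after this commit.
# Assumes transformers is installed; the prompt is illustrative.
from transformers import CLIPProcessor

processor = CLIPProcessor.from_pretrained("openai/clip-vit-large-patch14")
text_input = processor(
    text="a photo of two cats on a couch",
    truncation=True,
    padding=True,
    return_tensors="pt",
)
# input_ids feed the CLIP text encoder; no PIL image or images= argument is needed.
print(text_input["input_ids"].shape)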
