Spaces: Running on A10G
rynmurdock committed: Update app.py

app.py CHANGED
@@ -27,6 +27,9 @@ start_time = time.time()
 # TODO add to state instead of shared across all
 glob_idx = 0
 
+
+deployment = replicate.deployments.get("rynmurdock/zahir-deployment")
+
 def next_image(embs, ys, calibrate_prompts):
     global glob_idx
     glob_idx = glob_idx + 1
@@ -44,10 +47,17 @@ def next_image(embs, ys, calibrate_prompts):
         prompt = calibrate_prompts.pop(0)
         print(prompt)
 
-        output = replicate.run(
-            "rynmurdock/zahir:42c58addd49ab57f1e309f0b9a0f271f483bbef0470758757c623648fe989e42",
-            input={"prompt": prompt,}
+        prediction = deployment.predictions.create(
+            input={"prompt": prompt,}
         )
+        prediction.wait()
+        output = prediction.output
+
+        # output = replicate.run(
+        #     "rynmurdock/zahir:42c58addd49ab57f1e309f0b9a0f271f483bbef0470758757c623648fe989e42",
+        #     input={"prompt": prompt,}
+        # )
+
         response = requests.get(output['file1'])
         image = Image.open(BytesIO(response.content))
 
@@ -86,11 +96,17 @@ def next_image(embs, ys, calibrate_prompts):
 
         im_emb_st = str(im_emb[0].cpu().detach().tolist())[1:-1]
 
-        output = replicate.run(
-            "rynmurdock/zahir:42c58addd49ab57f1e309f0b9a0f271f483bbef0470758757c623648fe989e42",
-            input={"prompt": prompt, 'im_emb': im_emb_st}
+        prediction = deployment.predictions.create(
+            input={"prompt": prompt, 'im_emb': im_emb_st}
         )
+        prediction.wait()
+        output = prediction.output
+
+        # output = replicate.run(
+        #     "rynmurdock/zahir:42c58addd49ab57f1e309f0b9a0f271f483bbef0470758757c623648fe989e42",
+        #     input={"prompt": prompt, 'im_emb': im_emb_st}
+        # )
+
         response = requests.get(output['file1'])
         image = Image.open(BytesIO(response.content))
112 |
|