charliebaby2023 committed
Commit 12fa10e · verified · 1 Parent(s): a418426

Update handle_models.py

Files changed (1)
  1. handle_models.py +10 -6
handle_models.py CHANGED
@@ -1,13 +1,13 @@
 import gradio as gr
 from config import howManyModelsToUse,num_models,max_images,inference_timeout,MAX_SEED,thePrompt,preSetPrompt,negPreSetPrompt
-#from all_models import models
+from all_models import models
 import asyncio
 from externalmod import gr_Interface_load, save_image, randomize_seed
 import os
 from threading import RLock
 lock = RLock()
 HF_TOKEN = os.getenv("HF_TOKEN")
-#default_models = static_models[:num_models]
+default_models = models[:num_models]
 def get_current_time():
     from datetime import datetime
     now = datetime.now()
@@ -27,7 +27,9 @@ def load_fn(models,HF_TOKEN):
         models_load.update({model: m})
 
 async def infer(model_str, prompt, nprompt="", height=0, width=0, steps=0, cfg=0, seed=-1, timeout=inference_timeout):
-    print(f"{prompt}")
+    print(f"{prompt}\n")
+    print(f"{model_str}\n")
+    print(f"{timeout}\n")
     kwargs = {}
     if height > 0: kwargs["height"] = height
     if width > 0: kwargs["width"] = width
@@ -39,7 +41,8 @@ async def infer(model_str, prompt, nprompt="", height=0, width=0, steps=0, cfg=0
     theSeed = seed
     kwargs["seed"] = theSeed
     task = asyncio.create_task(asyncio.to_thread(models_load[model_str].fn, prompt=prompt, negative_prompt=nprompt, **kwargs, token=HF_TOKEN))
-    await asyncio.sleep(0)
+    print(f"await")
+    await asyncio.sleep(20)
     try:
         result = await asyncio.wait_for(task, timeout=timeout)
     except asyncio.TimeoutError as e:
@@ -55,17 +58,18 @@ async def infer(model_str, prompt, nprompt="", height=0, width=0, steps=0, cfg=0
         result = None
         raise Exception() from e
     if task.done() and result is not None and not isinstance(result, tuple):
+        print(f"{result}")
        with lock:
             png_path = model_str.replace("/", "_") + " - " + get_current_time() + "_" + str(theSeed) + ".png"
             image = save_image(result, png_path, model_str, prompt, nprompt, height, width, steps, cfg, theSeed)
             return image
     return None
 
-def gen_fn(model_str, prompt, nprompt="", height=0, width=0, steps=0, cfg=0, seed=-1):
+def gen_fn(model_str, prompt, nprompt="", height=0, width=0, steps=0, cfg=0, seed=-1, inference_timeout2=120):
     try:
         loop = asyncio.new_event_loop()
         result = loop.run_until_complete(infer(model_str, prompt, nprompt,
-                                               height, width, steps, cfg, seed, inference_timeout))
+                                               height, width, steps, cfg, seed, inference_timeout2))
     except (Exception, asyncio.CancelledError) as e:
         print(e)
         print(f"gen_fn: Task aborted: {model_str}")