NoufSaleh46 committed on
Commit
2188d3f
·
verified ·
1 Parent(s): 5b69df0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +50 -35
app.py CHANGED
@@ -1,44 +1,59 @@
1
  import gradio as gr
2
- from transformers import pipeline
 
3
 
4
- # Load models
5
- gen_model = pipeline("text2text-generation", model="google/flan-t5-large")
6
- translator_en_ar = pipeline("translation", model="Helsinki-NLP/opus-mt-en-ar") # English to Arabic
7
- translator_ar_en = pipeline("translation", model="Helsinki-NLP/opus-mt-ar-en") # Arabic to English
8
- tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-en-ar")
 
 
9
 
10
- def get_plant_info(plant_name, language):
11
- if language == "English":
12
- prompt = (
13
- f"Provide detailed information about {plant_name}. "
14
- f"Include its scientific name, growing conditions (light, water, soil type), "
15
- f"common uses, and how to take care of it."
16
- )
17
- response = gen_model(prompt, min_length=50, max_length=300)[0]["generated_text"]
18
- else: # Arabic
19
- translated_name = translator_ar_en(plant_name)[0]["translation_text"] # Convert Arabic input to English
20
- prompt = (
21
- f"Provide detailed information about {translated_name}. "
22
- f"Include its scientific name, growing conditions (light, water, soil type), "
23
- f"common uses, and how to take care of it."
24
- )
25
- response_en = gen_model(prompt, min_length=50, max_length=300)[0]["generated_text"]
26
- response = translator_en_ar(response_en)[0]["translation_text"] # Convert English output back to Arabic
 
27
 
28
- return response
 
 
 
 
 
29
 
30
- # Gradio UI
31
- interface = gr.Interface(
32
- fn=get_plant_info,
33
  inputs=[
34
- gr.Textbox(label="Enter Plant Name / أدخل اسم النبات"),
35
- gr.Radio(["English", "العربية"], label="Choose Language / اختر اللغة")
 
 
 
 
36
  ],
37
- outputs=gr.Textbox(label="Plant Information / معلومات النبات", lines=10),
38
- title="Plant Information App",
39
- description="Enter a plant name and select a language to get detailed information."
 
 
 
 
40
  )
41
 
42
- # Launch the app
43
- if __name__ == "__main__":
44
- demo.launch()
 
1
  import gradio as gr
2
+ from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
3
+ import torch
4
 
5
def get_model_name(language):
    """Return the Hugging Face model id for the chosen UI language.

    Any value other than "English" — including unrecognized ones — maps to
    the Arabic model, which matches the interface's default selection.
    """
    if language == "English":
        return "microsoft/Phi-3-mini-4k-instruct"
    # Arabic, and the fallback for any unexpected choice.
    return "ALLaM-AI/ALLaM-7B-Instruct-preview"
12
 
13
# Cache of already-built pipelines, keyed by model name. Without this, every
# request re-downloads/re-loads a multi-GB model, since generate_text() calls
# load_model() on each invocation.
_GENERATOR_CACHE = {}

def load_model(model_name):
    """Return a text-generation pipeline for *model_name*, building it once.

    The model is placed on CUDA when available, otherwise CPU. Subsequent
    calls with the same model name return the cached pipeline instead of
    reloading the weights.

    Args:
        model_name: Hugging Face model id (e.g. "microsoft/Phi-3-mini-4k-instruct").

    Returns:
        A transformers text-generation pipeline configured for greedy
        decoding (do_sample=False) with up to 500 new tokens and without
        echoing the prompt (return_full_text=False).
    """
    cached = _GENERATOR_CACHE.get(model_name)
    if cached is not None:
        return cached

    device = "cuda" if torch.cuda.is_available() else "cpu"
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        device_map=device,
        torch_dtype="auto",
        # NOTE(review): trust_remote_code executes model-repo code — acceptable
        # only because the model ids are hard-coded, not user-supplied.
        trust_remote_code=True,
    )
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    generator = pipeline(
        "text-generation",
        model=model,
        tokenizer=tokenizer,
        return_full_text=False,
        max_new_tokens=500,
        do_sample=False,
    )
    _GENERATOR_CACHE[model_name] = generator
    return generator
31
 
32
def generate_text(prompt, language):
    """Generate a completion for *prompt* with the model mapped to *language*.

    The prompt is wrapped as a single-turn chat message and passed to the
    language-appropriate text-generation pipeline; the generated text of the
    first candidate is returned.
    """
    generator = load_model(get_model_name(language))
    chat = [{"role": "user", "content": prompt}]
    result = generator(chat)
    return result[0]["generated_text"]
38
 
39
# Create the Gradio interface. fn receives (prompt, language) from the two
# inputs, in order, and its return value fills the output textbox.
demo = gr.Interface(
    fn=generate_text,
    inputs=[
        gr.Textbox(lines=2, placeholder="Enter your prompt here..."),
        gr.Dropdown(
            choices=["English", "Arabic"],  # Users choose the language, not the model name
            label="Choose Language",
            value="Arabic"  # Default to Arabic (ALLaM model)
        )
    ],
    outputs=gr.Textbox(label="Generated Text"),
    title="Text Generator",
    description="Enter a prompt and generate text in English or Arabic using AI models.",
    examples=[
        ["Give me information about Lavender.", "English"],
        ["أعطني معلومات عن نبات اللافندر", "Arabic"]
    ]
)

# Guard the launch so importing this module (e.g. from tests or tooling)
# doesn't start the web server as a side effect.
if __name__ == "__main__":
    demo.launch()