import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
import torch
import spaces
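# `spaces` provides the @spaces.GPU decorator used on Hugging Face ZeroGPU Spaces:
# the decorated function is allocated a GPU only for the duration of each call.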

def get_model_name(language):
    """Map language choice to the corresponding model."""
    model_mapping = {
        "English": "microsoft/Phi-3-mini-4k-instruct",
        "Arabic": "ALLaM-AI/ALLaM-7B-Instruct-preview",
    }
    return model_mapping.get(language, "ALLaM-AI/ALLaM-7B-Instruct-preview")  # Default to the Arabic model

def load_model(model_name):
    device = "cuda" if torch.cuda.is_available() else "cpu"
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        device_map=device,
        torch_dtype="auto",
        trust_remote_code=True,
    )
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    generator = pipeline(
        "text-generation",
        model=model,
        tokenizer=tokenizer,
        return_full_text=False,
        max_new_tokens=500,
        do_sample=False,
    )
    return generator

@spaces.GPU
def generate_plant_info(plant_name, language):
    model_name = get_model_name(language)
    generator = load_model(model_name)

    # Build the prompt for the selected language
    if language == "English":
        prompt = (f"Provide detailed information about {plant_name}. "
                  f"Include its scientific name, growing conditions (light, water, soil type), "
                  f"common uses, and care tips.")
    else:
        # Arabic prompt: "Provide detailed information about {plant_name}. Mention its scientific
        # name, its growing conditions (light, water, soil type), its common uses, and care tips."
        prompt = (f"قدم معلومات مفصلة عن {plant_name}. "
                  f"اذكر اسمه العلمي، وظروف نموه (الضوء، الماء، نوع التربة)، "
                  f"استخداماته الشائعة، ونصائح العناية به.")

    messages = [{"role": "user", "content": prompt}]
    output = generator(messages)
    return output[0]["generated_text"]

# Custom CSS for styling: Gradio expects raw CSS (no <style> tags), passed to the
# gr.Interface constructor; the app's root element carries the .gradio-container class.
custom_css = """
body {
    font-family: Arial, sans-serif;
    background-color: #f9f9f9;
}
.gradio-container {
    border-radius: 10px;
    background-color: #fff;
    box-shadow: 0 4px 20px rgba(0, 0, 0, 0.1);
}
.gradio-container button {
    background-color: #4CAF50;
    color: white;
}
.gradio-container button:hover {
    background-color: #45a049;
}
"""

# Create Gradio interface with enhancements
demo = gr.Interface(
    fn=generate_plant_info,
    inputs=[
        gr.Textbox(placeholder="Enter plant name (e.g., Lavender, Aloe Vera)...", label="Plant Name"),
        gr.Dropdown(
            choices=["English", "Arabic"],
            label="Choose Language",
            value="English",
        ),
    ],
    outputs=gr.Textbox(label="Plant Information"),
    title="🌿 AI Plant Guide - English & Arabic 🌿",
    description="Enter a plant name, and the AI will provide detailed information about it in English or Arabic.",
    examples=[
        ["Lavender", "English"],
        ["اللافندر", "Arabic"],  # "Lavender" in Arabic
        ["Tulip", "English"],
        ["الصبار", "Arabic"],    # "Cactus/Aloe" in Arabic
    ],
    theme="default",
    css=custom_css,
)

demo.launch(share=True)  # Launch the interface; the custom CSS is applied via gr.Interface above