import base64
import json
import os
import re
import time
import uuid
import zipfile
from pathlib import Path

import pymupdf
###############################
# Environment setup
###############################
# Comment these out if the packages are already installed.
os.system('pip uninstall -y magic-pdf')
os.system('pip install git+https://github.com/opendatalab/MinerU.git@dev')
os.system('wget https://github.com/opendatalab/MinerU/raw/dev/scripts/download_models_hf.py -O download_models_hf.py')

# Model download (comment out in offline environments).
# Note: os.system does not raise on failure, so check its exit code instead.
try:
    ret = os.system('python download_models_hf.py')
    if ret != 0:
        raise RuntimeError(f"download_models_hf.py exited with code {ret}")
except Exception as e:
    print("An error occurred while downloading the models. Check your network connection or place the models manually.")
    print("Error message:", e)
###############################
# magic-pdf.json handling
###############################
json_path = "/home/user/magic-pdf.json"

if os.path.exists(json_path):
    # Load the existing config file.
    with open(json_path, 'r', encoding='utf-8') as file:
        data = json.load(file)
else:
    # Create a default config if none exists.
    data = {
        "device-mode": "cuda",  # use "cpu" for CPU-only environments
        "llm-aided-config": {
            "title_aided": {
                "api_key": os.getenv('apikey', ""),
                "enable": bool(os.getenv('apikey'))
            }
        }
    }
    with open(json_path, 'w', encoding='utf-8') as file:
        json.dump(data, file, indent=4)

# Update the config as needed.
data['device-mode'] = "cuda"  # change to "cpu" etc. if required
if os.getenv('apikey'):
    data['llm-aided-config']['title_aided']['api_key'] = os.getenv('apikey')
    data['llm-aided-config']['title_aided']['enable'] = True

with open(json_path, 'w', encoding='utf-8') as file:
    json.dump(data, file, indent=4)
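# For reference, the config written above ends up shaped roughly like this
# (a sketch; MinerU may add further keys of its own):
#
#     {
#         "device-mode": "cuda",
#         "llm-aided-config": {
#             "title_aided": {"api_key": "<your key>", "enable": true}
#         }
#     }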
# Copy the bundled paddleocr models into the expected home directory.
os.system('cp -r paddleocr /home/user/.paddleocr')
###############################
# Other libraries
###############################
import gradio as gr
from loguru import logger
from gradio_pdf import PDF

###############################
# magic_pdf imports
###############################
from magic_pdf.data.data_reader_writer import FileBasedDataReader
from magic_pdf.libs.hash_utils import compute_sha256
from magic_pdf.tools.common import do_parse, prepare_env

###############################
# Shared helpers
###############################
def create_css():
    """
    Base CSS styles for the app.
    """
return """ | |
.gradio-container { | |
width: 100vw !important; | |
min-height: 100vh !important; | |
margin: 0 !important; | |
padding: 0 !important; | |
background: linear-gradient(135deg, #EFF6FF 0%, #F5F3FF 100%); | |
display: flex; | |
flex-direction: column; | |
overflow-y: auto !important; | |
} | |
.title-area { | |
text-align: center; | |
margin: 1rem auto; | |
padding: 1rem; | |
background: white; | |
border-radius: 1rem; | |
box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1); | |
max-width: 800px; | |
} | |
.title-area h1 { | |
background: linear-gradient(90deg, #2563EB 0%, #7C3AED 100%); | |
-webkit-background-clip: text; | |
-webkit-text-fill-color: transparent; | |
font-size: 2.5rem; | |
font-weight: bold; | |
margin-bottom: 0.5rem; | |
} | |
.title-area p { | |
color: #6B7280; | |
font-size: 1.1rem; | |
} | |
.gr-block, .gr-box { | |
padding: 0.5rem !important; | |
} | |
""" | |
def read_fn(path):
    disk_rw = FileBasedDataReader(os.path.dirname(path))
    return disk_rw.read(os.path.basename(path))
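# Usage sketch (hypothetical path): read_fn("/tmp/sample.pdf") returns the raw
# bytes of sample.pdf, equivalent to Path("/tmp/sample.pdf").read_bytes().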
def parse_pdf(doc_path, output_dir, end_page_id, is_ocr, layout_mode, formula_enable, table_enable, language):
    os.makedirs(output_dir, exist_ok=True)
    try:
        file_name = f"{str(Path(doc_path).stem)}_{time.time()}"
        pdf_data = read_fn(doc_path)
        parse_method = "ocr" if is_ocr else "auto"
        local_image_dir, local_md_dir = prepare_env(output_dir, file_name, parse_method)
        do_parse(
            output_dir,
            file_name,
            pdf_data,
            [],
            parse_method,
            False,
            end_page_id=end_page_id,
            layout_model=layout_mode,
            formula_enable=formula_enable,
            table_enable=table_enable,
            lang=language,
            f_dump_orig_pdf=False
        )
        return local_md_dir, file_name
    except Exception as e:
        logger.exception(e)
        # Re-raise so callers don't silently unpack None.
        raise
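# Usage sketch (hypothetical arguments): on success parse_pdf returns the
# markdown output directory and the derived file stem, e.g.
#     local_md_dir, file_name = parse_pdf("a.pdf", "./output", 9, False,
#                                         "doclayout_yolo", True, True, "auto")
# On failure the exception is logged and re-raised to the caller.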
def compress_directory_to_zip(directory_path, output_zip_path):
    try:
        with zipfile.ZipFile(output_zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for root, dirs, files in os.walk(directory_path):
                for file in files:
                    file_path = os.path.join(root, file)
                    arcname = os.path.relpath(file_path, directory_path)
                    zipf.write(file_path, arcname)
        return 0
    except Exception as e:
        logger.exception(e)
        return -1
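# Usage sketch (hypothetical paths): compress_directory_to_zip("./output/run1",
# "./output/run1.zip") archives the directory and returns 0, or -1 on failure.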
def image_to_base64(image_path):
    with open(image_path, "rb") as image_file:
        return base64.b64encode(image_file.read()).decode('utf-8')
def replace_image_with_base64(markdown_text, image_dir_path):
    pattern = r'\!\[(?:[^\]]*)\]\(([^)]+)\)'

    def replace(match):
        relative_path = match.group(1)
        full_path = os.path.join(image_dir_path, relative_path)
        base64_image = image_to_base64(full_path)
        # Inline the image as a base64 data URI so the markdown is self-contained.
        return f"![{relative_path}](data:image/jpeg;base64,{base64_image})"

    return re.sub(pattern, replace, markdown_text)
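# Example of the rewrite this performs (hypothetical file name):
#     ![fig](images/fig.png)  ->  ![images/fig.png](data:image/jpeg;base64,/9j/4AAQ...)
# so the markdown renders without needing the image files on disk.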
def to_pdf(file_path):
    """
    Convert an image (JPG/PNG, etc.) to PDF.
    TXT and CSV files are returned unchanged.
    """
    ext = Path(file_path).suffix.lower()
    if ext in ['.txt', '.csv']:
        return file_path

    with pymupdf.open(file_path) as f:
        if f.is_pdf:
            return file_path
        else:
            pdf_bytes = f.convert_to_pdf()
            unique_filename = f"{uuid.uuid4()}.pdf"
            tmp_file_path = os.path.join(os.path.dirname(file_path), unique_filename)
            with open(tmp_file_path, 'wb') as tmp_pdf_file:
                tmp_pdf_file.write(pdf_bytes)
            return tmp_file_path
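# Usage sketch (hypothetical file names): to_pdf("scan.png") writes a one-page
# PDF next to the image and returns its path, while to_pdf("doc.pdf") and
# to_pdf("notes.txt") return the input path unchanged.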
def to_markdown(file_path, end_pages, is_ocr, layout_mode, formula_enable, table_enable, language, progress=gr.Progress(track_tqdm=False)):
    """
    Convert an uploaded PDF/image/TXT/CSV file to markdown.
    """
    ext = Path(file_path).suffix.lower()
    if ext in ['.txt', '.csv']:
        progress(0, desc="Reading file...")
        with open(file_path, 'r', encoding='utf-8') as f:
            txt_content = f.read()
        time.sleep(0.5)
        progress(0.5, desc="Processing file contents...")
        progress(1.0, desc="Conversion complete!")
        return f"```\n{txt_content}\n```\n\n**Conversion complete (text/CSV file)**"
    else:
        progress(0, desc="Converting to PDF...")
        file_path = to_pdf(file_path)
        time.sleep(0.5)
        if end_pages > 20:
            end_pages = 20
        progress(0.2, desc="Parsing document...")
        local_md_dir, file_name = parse_pdf(file_path, './output', end_pages - 1, is_ocr,
                                            layout_mode, formula_enable, table_enable, language)
        time.sleep(0.5)
        progress(0.5, desc="Creating zip archive...")
        archive_zip_path = os.path.join("./output", compute_sha256(local_md_dir) + ".zip")
        zip_archive_success = compress_directory_to_zip(local_md_dir, archive_zip_path)
        if zip_archive_success == 0:
            logger.info("Compression succeeded")
            status_message = "\n\n**Conversion complete (zip succeeded)**"
        else:
            logger.error("Compression failed")
            status_message = "\n\n**Conversion complete (zip failed)**"
        time.sleep(0.5)
        progress(0.7, desc="Reading markdown...")
        md_path = os.path.join(local_md_dir, file_name + ".md")
        with open(md_path, 'r', encoding='utf-8') as f:
            txt_content = f.read()
        time.sleep(0.5)
        progress(0.9, desc="Inlining images as base64...")
        md_content = replace_image_with_base64(txt_content, local_md_dir)
        time.sleep(0.5)
        progress(1.0, desc="Conversion complete!")
        return md_content + status_message
def to_markdown_comparison(file_a, file_b, end_pages, is_ocr, layout_mode, formula_enable, table_enable, language, progress=gr.Progress(track_tqdm=False)):
    """
    Convert both files and combine them into markdown for A/B comparison.
    """
    combined_md = ""
    if file_a is not None:
        combined_md += "### Document A\n"
        md_a = to_markdown(file_a, end_pages, is_ocr, layout_mode, formula_enable, table_enable, language, progress=progress)
        combined_md += md_a + "\n"
    if file_b is not None:
        combined_md += "### Document B\n"
        md_b = to_markdown(file_b, end_pages, is_ocr, layout_mode, formula_enable, table_enable, language, progress=progress)
        combined_md += md_b + "\n"
    if file_a is not None and file_b is not None:
        combined_md += "### Comparison:\nCompare the two documents, analyzing their differences, strengths and weaknesses, and key content.\n"
    return combined_md
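# The combined markdown handed to the UI is laid out roughly as (sketch):
#     ### Document A
#     <markdown of A>
#     ### Document B
#     <markdown of B>
#     ### Comparison: ...
# and is later injected as context into the LLM prompt by user_message below.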
def init_model():
    """
    Initialize the magic-pdf models.
    """
    from magic_pdf.model.doc_analyze_by_custom_model import ModelSingleton
    try:
        model_manager = ModelSingleton()
        txt_model = model_manager.get_model(False, False)
        logger.info("txt_model initialized")
        ocr_model = model_manager.get_model(True, False)
        logger.info("ocr_model initialized")
        return 0
    except Exception as e:
        logger.exception(e)
        return -1

model_init = init_model()
logger.info(f"model_init: {model_init}")
###############################
# Language lists
###############################
latin_lang = [
    'af','az','bs','cs','cy','da','de','es','et','fr','ga','hr','hu','id','is','it','ku',
    'la','lt','lv','mi','ms','mt','nl','no','oc','pi','pl','pt','ro','rs_latin','sk','sl',
    'sq','sv','sw','tl','tr','uz','vi','french','german'
]
arabic_lang = ['ar','fa','ug','ur']
cyrillic_lang = ['ru','rs_cyrillic','be','bg','uk','mn','abq','ady','kbd','ava','dar','inh','che','lbe','lez','tab']
devanagari_lang = ['hi','mr','ne','bh','mai','ang','bho','mah','sck','new','gom','sa','bgc']
other_lang = ['ch','en','korean','japan','chinese_cht','ta','te','ka']

all_lang = ['', 'auto']
all_lang.extend([*other_lang, *latin_lang, *arabic_lang, *cyrillic_lang, *devanagari_lang])
###############################
# (1) LLM for PDF Chat
###############################
import google.generativeai as genai
from gradio import ChatMessage
from typing import Iterator

GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
genai.configure(api_key=GEMINI_API_KEY)
model = genai.GenerativeModel("gemini-2.0-flash-thinking-exp-1219")
def format_chat_history(messages: list) -> list:
    """
    Convert messages into the (role, parts[]) format Gemini understands.
    """
    formatted_history = []
    for message in messages:
        # Skip "Thinking" status messages (assistant messages tagged with a metadata title).
        # Note: hasattr() alone is unreliable here because ChatMessage always defines .metadata.
        meta = getattr(message, "metadata", None)
        meta_title = meta.get("title") if isinstance(meta, dict) else getattr(meta, "title", None)
        if not (message.role == "assistant" and meta_title):
            formatted_history.append({
                # The Gemini API expects "model" rather than "assistant" in chat history.
                "role": "user" if message.role == "user" else "model",
                "parts": [message.content]
            })
    return formatted_history
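# Sketch of the conversion (hypothetical messages):
#     [ChatMessage(role="user", content="hi"), ChatMessage(role="assistant", content="hello")]
# becomes
#     [{"role": "user", "parts": ["hi"]}, {"role": "model", "parts": ["hello"]}]
# with any "Thinking" status entries dropped along the way.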
def convert_chat_messages_to_gradio_format(messages):
    """
    Convert a ChatMessage list into [(user utterance, bot reply), ...] tuples.
    """
    gradio_chat = []
    user_text, assistant_text = None, None
    for msg in messages:
        if msg.role == "user":
            if user_text is not None or assistant_text is not None:
                gradio_chat.append((user_text or "", assistant_text or ""))
            user_text = msg.content
            assistant_text = None
        else:  # assistant
            if user_text is None:
                user_text = ""
            if assistant_text is None:
                assistant_text = msg.content
            else:
                assistant_text += msg.content
    if user_text is not None or assistant_text is not None:
        gradio_chat.append((user_text or "", assistant_text or ""))
    return gradio_chat
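# Sketch: a run like [user "hi", assistant "hel", assistant "lo"] collapses to
# [("hi", "hello")], since consecutive assistant chunks are merged into one reply.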
def stream_gemini_response(user_query: str, messages: list) -> Iterator[list]:
    """
    Stream the Gemini response, separating "thinking" output from the final answer.
    """
    if not user_query.strip():
        user_query = "...(No content from user)..."
    try:
        print(f"\n=== [Gemini] New Request ===\nUser message: '{user_query}'")
        chat_history = format_chat_history(messages)
        chat = model.start_chat(history=chat_history)
        response = chat.send_message(user_query, stream=True)

        thought_buffer = ""
        response_buffer = ""
        thinking_complete = False

        # Append a "Thinking" placeholder message.
        messages.append(
            ChatMessage(
                role="assistant",
                content="",
                metadata={"title": "⚙️ Thinking: *The thoughts produced by the model are experimental"}
            )
        )
        yield convert_chat_messages_to_gradio_format(messages)

        for chunk in response:
            parts = chunk.candidates[0].content.parts
            current_chunk = parts[0].text
            if len(parts) == 2 and not thinking_complete:
                # First part = the remaining "Thinking" output
                thought_buffer += current_chunk
                messages[-1] = ChatMessage(
                    role="assistant",
                    content=thought_buffer,
                    metadata={"title": "⚙️ Thinking: *The thoughts produced by the model are experimental"}
                )
                yield convert_chat_messages_to_gradio_format(messages)
                # Second part = start of the final answer
                response_buffer = parts[1].text
                messages.append(ChatMessage(role="assistant", content=response_buffer))
                thinking_complete = True
            elif thinking_complete:
                # Already streaming the final answer
                response_buffer += current_chunk
                messages[-1] = ChatMessage(role="assistant", content=response_buffer)
            else:
                # Still "Thinking"
                thought_buffer += current_chunk
                messages[-1] = ChatMessage(
                    role="assistant",
                    content=thought_buffer,
                    metadata={"title": "⚙️ Thinking: *The thoughts produced by the model are experimental"}
                )
            yield convert_chat_messages_to_gradio_format(messages)

        print(f"\n=== [Gemini] Final Response ===\n{response_buffer}")
    except Exception as e:
        print(f"\n=== [Gemini] Error ===\n{str(e)}")
        messages.append(ChatMessage(role="assistant", content=f"An error occurred: {str(e)}"))
        yield convert_chat_messages_to_gradio_format(messages)
def user_message(msg: str, history: list, doc_text: str) -> tuple[str, list, str]:
    """
    - msg: the text the user typed into the input box
    - history: list of ChatMessage
    - doc_text: the converted document (conversion_md)
    returns:
      (1) "" to clear the UI input box,
      (2) the updated history,
      (3) the user_query actually sent to the LLM
    """
    if doc_text.strip():
        user_query = f"Answer with reference to the following document:\n\n{doc_text}\n\nQuestion: {msg}"
    else:
        user_query = msg
    history.append(ChatMessage(role="user", content=user_query))
    return "", history, user_query
def reset_states(file_a, file_b):
    """
    Reset the conversation when a new file is uploaded.
    """
    return [], "", ""

def clear_all():
    """
    Clear the entire conversation.
    """
    return [], "", ""
###############################
# UI assembly
###############################
if __name__ == "__main__":
    with gr.Blocks(title="Compare RAG CHAT", css=create_css()) as demo:
        with gr.Tab("PDF Chat with LLM"):
            gr.HTML("""
            <div class="title-area">
                <h1>VisionOCR</h1>
                <p>Upload two PDF/image/text/CSV files for an A/B comparison, then chat with a reasoning LLM.<br>
                If only one file is uploaded, that file alone is analyzed.</p>
            </div>
            """)
            conversion_md = gr.Markdown(label="Conversion result", visible=True)
            md_state = gr.State("")           # document conversion result
            chat_history = gr.State([])       # list of ChatMessage
            user_query_holder = gr.State("")  # temporary storage for the actual user_query
            chatbot = gr.Chatbot(visible=True)

            with gr.Row():
                file_a = gr.File(label="Upload document A", file_types=[".pdf", ".png", ".jpeg", ".jpg", ".txt", ".csv"], interactive=True)
                file_b = gr.File(label="Upload document B", file_types=[".pdf", ".png", ".jpeg", ".jpg", ".txt", ".csv"], interactive=True)
            convert_btn = gr.Button("Convert for comparison")

            # Reset chat state whenever a new file is uploaded.
            file_a.change(
                fn=reset_states,
                inputs=[file_a, file_b],
                outputs=[chat_history, md_state, chatbot]
            )
            file_b.change(
                fn=reset_states,
                inputs=[file_a, file_b],
                outputs=[chat_history, md_state, chatbot]
            )

            max_pages = gr.Slider(1, 20, 10, visible=False)
            layout_mode = gr.Dropdown(["layoutlmv3", "doclayout_yolo"], value="doclayout_yolo", visible=False)
            language = gr.Dropdown(all_lang, value='auto', visible=False)
            formula_enable = gr.Checkbox(value=True, visible=False)
            is_ocr = gr.Checkbox(value=False, visible=False)
            table_enable = gr.Checkbox(value=True, visible=False)

            convert_btn.click(
                fn=to_markdown_comparison,
                inputs=[file_a, file_b, max_pages, is_ocr, layout_mode, formula_enable, table_enable, language],
                outputs=conversion_md,
                show_progress=True
            )

            gr.Markdown("## Chat with the reasoning LLM")
            gr.Markdown(
                "### Example comparison prompts:\n"
                "- Compare the two files and explain the content differences in detail.\n"
                "- Compare the two files and explain which one is the stronger proposal or content.\n"
                "- Analyze the differences in logical structure and subject matter between the two documents.\n"
                "- Compare the differences in style and mode of expression between the two documents."
            )
            with gr.Row():
                chat_input = gr.Textbox(lines=1, placeholder="Enter your question...")
                clear_btn = gr.Button("Clear conversation")

            # user_message maps (chat_input, chat_history, conversion_md)
            # -> outputs=[chat_input, chat_history, user_query_holder];
            # user_query_holder (the actual query) is then fed to stream_gemini_response.
            chat_input.submit(
                fn=user_message,
                inputs=[chat_input, chat_history, conversion_md],
                outputs=[chat_input, chat_history, user_query_holder]
            ).then(
                fn=stream_gemini_response,
                inputs=[user_query_holder, chat_history],
                outputs=chatbot
            )
            clear_btn.click(
                fn=clear_all,
                inputs=[],
                outputs=[chat_history, md_state, chatbot]
            )

    # Set share=True if you want a public share link.
    demo.launch(server_name="0.0.0.0", server_port=7860, debug=True, ssr_mode=True, share=False)