gourisankar85 committed on
Commit
e77d0f3
·
verified ·
1 Parent(s): 010d51d

Upload 2 files

Browse files
Files changed (1) hide show
  1. app.py +88 -39
app.py CHANGED
@@ -1,8 +1,6 @@
1
  import logging
2
  import gradio as gr
3
  from utils.document_utils import initialize_logging
4
- from retriever.chat_manager import chat_response
5
- # Note: DocumentManager is now initialized in retrieve_documents.py
6
  from globals import app_config
7
 
8
  # Configure logging
@@ -13,47 +11,54 @@ def load_sample_question(question):
13
  return question
14
 
15
  def clear_selection():
16
- return [], "", [] # Reset doc_selector to empty list
17
 
18
  def process_uploaded_file(file, current_selection):
19
  """Process uploaded file using DocumentManager and update UI."""
20
- status, page_list, filename, _ = app_config.doc_manager.process_document(file.name if file else None)
21
-
22
- # Update current selection to include new file if not already present
23
- updated_selection = current_selection if current_selection else []
24
- if filename and filename not in updated_selection:
25
- updated_selection.append(filename)
26
-
27
- return (
28
- status,
29
- page_list,
30
- gr.update(choices=app_config.doc_manager.get_uploaded_documents(), value=updated_selection)
31
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
 
33
  def update_doc_selector(selected_docs):
34
  """Keep selected documents in sync."""
35
  return selected_docs
36
 
37
  # UI Configuration
38
- models = ["qwen-2.5-32b", "gemma2-9b-it"]
 
39
  example_questions = [
40
- "What is communication server?",
41
  "Show me an example of a configuration file.",
42
  "How to create Protected File Directories ?",
43
- "What are the attributes Azureblobstorage?",
44
  "What is Mediator help?",
45
  "Why AzureBlobStorage port is used?"
46
  ]
47
- all_questions = [
48
- "Can you explain Communication Server architecture?",
49
- "Why does the other instance of my multi-instance qmgr seem to hang after a failover? Queue manager will not start after failover.",
50
- "Explain the concept of blockchain.",
51
- "What is the capital of France?",
52
- "Do Surface Porosity and Pore Size Influence Mechanical Properties and Cellular Response to PEEK?",
53
- "How does a vaccine work?",
54
- "Tell me the step-by-step instruction for front-door installation.",
55
- "What are the risk factors for heart disease?",
56
- ]
57
 
58
  with gr.Blocks(css="""
59
  .chatbot .user {
@@ -88,6 +93,13 @@ with gr.Blocks(css="""
88
  # 🤖 IntelliDoc: AI Document Explorer
89
  **AI Document Explorer** allows you to upload PDF documents and interact with them using AI-powered analysis and summarization. Ask questions, extract key insights, and gain a deeper understanding of your documents effortlessly.
90
  """)
 
 
 
 
 
 
 
91
  with gr.Row():
92
  # Left Sidebar
93
  with gr.Column(scale=2):
@@ -104,18 +116,19 @@ with gr.Blocks(css="""
104
  upload_status = gr.Textbox(label="Upload Status", interactive=False)
105
 
106
  # Process uploaded file and update UI
107
- upload_btn.change(
108
  process_uploaded_file,
109
  inputs=[upload_btn, doc_selector],
110
  outputs=[
111
  upload_status,
112
- gr.State(), # page_list
113
- doc_selector # Update choices and value together
 
114
  ]
115
  )
116
  clear_btn.click(
117
  clear_selection,
118
- outputs=[doc_selector, upload_status, gr.State()]
119
  )
120
  # Reinitialize LLM when the model changes
121
  model_selector.change(
@@ -131,19 +144,20 @@ with gr.Blocks(css="""
131
  with gr.Row():
132
  chat_input = gr.Textbox(label="Ask additional questions about the document...", show_label=False, placeholder="Ask additional questions about the document...", elem_id="chat-input", lines=3)
133
  chat_btn = gr.Button("🚀 Send", variant="primary", elem_id="send-button", scale=0)
134
- chat_btn.click(chat_response, inputs=[chat_input, doc_selector, chat_history], outputs=chat_history).then(
135
  lambda: "", # Return an empty string to clear the chat_input
136
  outputs=chat_input
137
  )
138
 
139
  # Right Sidebar (Sample Questions & History)
140
  with gr.Column(scale=2):
141
- gr.Markdown("## Frequently asked questions:")
142
  with gr.Column():
143
- gr.Examples(
144
- examples=example_questions,
145
- inputs=chat_input,
146
- label=""
 
147
  )
148
  '''question_dropdown = gr.Dropdown(
149
  label="",
@@ -152,9 +166,44 @@ with gr.Blocks(css="""
152
  info="Choose a question from the dropdown to populate the query box."
153
  )'''
154
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
155
  #gr.Markdown("## Logs")
156
  #history = gr.Textbox(label="Previous Queries", interactive=False)
157
 
158
-
159
  if __name__ == "__main__":
160
  interface.launch()
 
1
  import logging
2
  import gradio as gr
3
  from utils.document_utils import initialize_logging
 
 
4
  from globals import app_config
5
 
6
  # Configure logging
 
11
  return question
12
 
13
def clear_selection():
    """Reset the document selector, upload-status box, and filename state.

    Returns:
        Tuple of (doc_selector update, upload_status text, filename_state text)
        matching the clear_btn.click outputs.
    """
    # gr.update(value=[]) empties the selection without touching the choices.
    cleared_selector = gr.update(value=[])
    return cleared_selector, "", ""
15
 
16
def process_uploaded_file(file, current_selection):
    """Process an uploaded file via the DocumentManager and update the UI.

    Args:
        file: Uploaded file object from Gradio, or None when the input is cleared.
        current_selection: List of document names currently selected in the selector.

    Returns:
        Tuple of (status message, doc_selector update, trigger_summary flag,
        filename) matching the upload_btn.change outputs.
    """
    try:
        if file is None:
            # When the file input is cleared, preserve the current selection
            # and the already-uploaded document choices.
            uploaded_docs = app_config.doc_manager.get_uploaded_documents()
            return (
                "",
                gr.update(choices=uploaded_docs, value=current_selection or []),
                False,
                ""
            )

        status, filename, _doc_id = app_config.doc_manager.process_document(file.name)

        # Copy before appending so the caller's list is never mutated in place.
        updated_selection = list(current_selection) if current_selection else []
        if filename and filename not in updated_selection:
            updated_selection.append(filename)

        # Only trigger the auto-summary chain when a file was actually processed.
        trigger_summary = bool(filename)
        # Lazy %-args (not an f-string) and the actual filename — the previous
        # log line contained a literal placeholder and never logged the name.
        logging.info("Processed file: %s, Trigger summary: %s",
                     filename, trigger_summary)

        return (
            status,
            gr.update(choices=app_config.doc_manager.get_uploaded_documents(),
                      value=updated_selection),
            trigger_summary,
            filename
        )
    except Exception as e:
        # logging.exception records the traceback for easier debugging.
        logging.exception("Error in process_uploaded_file: %s", e)
        # Leave the selector unchanged on error instead of wiping its choices.
        return "Error processing file", gr.update(), False, ""
46
 
47
def update_doc_selector(selected_docs):
    """Pass the current selection through unchanged to keep the selector in sync."""
    synced_selection = selected_docs
    return synced_selection
50
 
51
  # UI Configuration
52
+ models = [ "gemma2-9b-it", "llama3-70b-8192"]
53
+
54
  example_questions = [
55
+ "What is the architecture of the Communication Server?",
56
  "Show me an example of a configuration file.",
57
  "How to create Protected File Directories ?",
58
+ "What functionalities are available in the Communication Server setups?",
59
  "What is Mediator help?",
60
  "Why AzureBlobStorage port is used?"
61
  ]
 
 
 
 
 
 
 
 
 
 
62
 
63
  with gr.Blocks(css="""
64
  .chatbot .user {
 
93
  # 🤖 IntelliDoc: AI Document Explorer
94
  **AI Document Explorer** allows you to upload PDF documents and interact with them using AI-powered analysis and summarization. Ask questions, extract key insights, and gain a deeper understanding of your documents effortlessly.
95
  """)
96
+ summary_query_state = gr.State() # State to hold the summary query
97
+ trigger_summary_state = gr.State() # State to hold trigger flag
98
+ filename_state = gr.State() # State to hold file name
99
+ chunks_state = gr.State()
100
+ summary_text_state = gr.State()
101
+ sample_questions_state = gr.State()
102
+
103
  with gr.Row():
104
  # Left Sidebar
105
  with gr.Column(scale=2):
 
116
  upload_status = gr.Textbox(label="Upload Status", interactive=False)
117
 
118
  # Process uploaded file and update UI
119
+ upload_event = upload_btn.change(
120
  process_uploaded_file,
121
  inputs=[upload_btn, doc_selector],
122
  outputs=[
123
  upload_status,
124
+ doc_selector,
125
+ trigger_summary_state, # Store trigger_summary
126
+ filename_state
127
  ]
128
  )
129
  clear_btn.click(
130
  clear_selection,
131
+ outputs=[doc_selector, upload_status, filename_state]
132
  )
133
  # Reinitialize LLM when the model changes
134
  model_selector.change(
 
144
  with gr.Row():
145
  chat_input = gr.Textbox(label="Ask additional questions about the document...", show_label=False, placeholder="Ask additional questions about the document...", elem_id="chat-input", lines=3)
146
  chat_btn = gr.Button("🚀 Send", variant="primary", elem_id="send-button", scale=0)
147
+ chat_btn.click(app_config.chat_manager.generate_chat_response, inputs=[chat_input, doc_selector, chat_history], outputs=chat_history).then(
148
  lambda: "", # Return an empty string to clear the chat_input
149
  outputs=chat_input
150
  )
151
 
152
  # Right Sidebar (Sample Questions & History)
153
  with gr.Column(scale=2):
154
+ gr.Markdown("## Sample questions for this document:")
155
  with gr.Column():
156
+ sample_questions = gr.Dropdown(
157
+ label="Select a sample question",
158
+ choices=[],
159
+ interactive=True,
160
+ allow_custom_value=True # Allows users to type custom questions if needed
161
  )
162
  '''question_dropdown = gr.Dropdown(
163
  label="",
 
166
  info="Choose a question from the dropdown to populate the query box."
167
  )'''
168
 
169
+ # After upload, generate "Auto Summary" message only if trigger_summary is True
170
+ upload_event.then(
171
+ fn=lambda trigger, filename: "Can you provide summary of the document" if trigger and filename else None,
172
+ inputs=[trigger_summary_state, filename_state],
173
+ outputs=[summary_query_state]
174
+ ).then(
175
+ fn=lambda query, history: history + [{"role": "user", "content": ""}, {"role": "assistant", "content": "Generating summary of the document, please wait..."}] if query else history,
176
+ inputs=[summary_query_state, chat_history],
177
+ outputs=[chat_history]
178
+ ).then(
179
+ fn=lambda trigger, filename: app_config.doc_manager.get_chunks(filename) if trigger and filename else None,
180
+ inputs=[trigger_summary_state, filename_state],
181
+ outputs=[chunks_state]
182
+ ).then(
183
+ fn=lambda chunks: app_config.chat_manager.generate_summary(chunks) if chunks else None,
184
+ inputs=[chunks_state],
185
+ outputs=[summary_text_state]
186
+ ).then(
187
+ fn=lambda summary, history: history + [{"role": "assistant", "content": summary}] if summary else history,
188
+ inputs=[summary_text_state, chat_history],
189
+ outputs=[chat_history]
190
+ ).then(
191
+ fn=lambda chunks: app_config.chat_manager.generate_sample_questions(chunks) if chunks else [],
192
+ inputs=[chunks_state],
193
+ outputs=[sample_questions_state]
194
+ ).then(
195
+ fn=lambda questions: gr.update(choices=questions if questions else ["No questions available"]),
196
+ inputs=[sample_questions_state],
197
+ outputs=[sample_questions]
198
+ )
199
+ # Populate chat_input when a question is selected
200
+ sample_questions.change(
201
+ fn=lambda question: question,
202
+ inputs=[sample_questions],
203
+ outputs=[chat_input]
204
+ )
205
  #gr.Markdown("## Logs")
206
  #history = gr.Textbox(label="Previous Queries", interactive=False)
207
 
 
208
  if __name__ == "__main__":
209
  interface.launch()