Commit e38650f
1 Parent(s): 3a2f487
Update llm.py

llm.py CHANGED
@@ -120,7 +120,7 @@ def construct_index(
             index = GPTVectorStoreIndex.from_documents(documents, service_context=service_context)
         else:
             index = GPTListIndex.from_documents(documents, service_context=service_context)
-        index.storage_context.persist(persist_dir=
+        index.storage_context.persist(persist_dir="./index")
     except Exception as e:
         print(e)
         return None
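Note on this hunk: the persist call now pins the index to a fixed ./index directory. As a minimal sketch of the persist/reload round-trip, assuming the 0.6-era llama_index APIs this file already uses (the sample Document and the default OpenAI-keyed embeddings are stand-ins, not from this repo):

# Sketch only: persisting an index and rebuilding it from ./index later.
from llama_index import Document, GPTVectorStoreIndex, StorageContext, load_index_from_storage

documents = [Document("hello world")]  # stand-in document, not from this repo
index = GPTVectorStoreIndex.from_documents(documents)
index.storage_context.persist(persist_dir="./index")  # writes docstore/index_store/vector_store JSON

# Later: rebuild the index from disk instead of re-embedding.
storage_context = StorageContext.from_defaults(persist_dir="./index")
index = load_index_from_storage(storage_context)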
@@ -163,7 +163,7 @@ def chat_ai(
     print(response)
 
     if response is None:
-        response =
+        response = "Please upload a document first"
     response = parse_text(response)
 
     context.append({"role": "user", "content": question})
@@ -184,17 +184,17 @@ def ask_ai(
 ):
     logging.debug("Querying index...")
     prompt_helper = PromptHelper(
-
-
-
+        300,
+        200,
+        -20000
     )
     llm = LlamaCpp(model_path=model_path,
-                   n_ctx=
+                   n_ctx=300,
                    use_mlock=True,
                    n_parts=-1,
                    temperature=temprature,
                    top_p=0.40,
-                   last_n_tokens_size=
+                   last_n_tokens_size=100,
                    n_threads=4,
                    f16_kv=True,
                    max_tokens=200
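Note on this hunk: the previously empty arguments now carry concrete values. A sketch with the parameter names spelled out, assuming PromptHelper's 0.6-era positional signature (max_input_size, num_output, max_chunk_overlap) and langchain's LlamaCpp wrapper; model_path and temperature are placeholders for the variables the file passes in:

# Sketch: the hunk's values mapped onto (assumed) parameter names.
from langchain.llms import LlamaCpp
from llama_index import PromptHelper

prompt_helper = PromptHelper(
    300,     # max_input_size: total token budget for prompt plus answer
    200,     # num_output: tokens reserved for the completion
    -20000,  # max_chunk_overlap: large negative value effectively disables overlap
)
llm = LlamaCpp(
    model_path="models/ggml-model.bin",  # placeholder; the file passes model_path in
    n_ctx=300,               # llama.cpp context window; must cover prompt + max_tokens
    use_mlock=True,          # lock model weights in RAM
    n_parts=-1,              # auto-detect number of model parts
    temperature=0.7,         # placeholder; the file passes its `temprature` variable
    top_p=0.40,
    last_n_tokens_size=100,  # window consulted for repetition penalties
    n_threads=4,
    f16_kv=True,             # half-precision key/value cache
    max_tokens=200,
)

Worth noting as a design observation: with n_ctx=300 and max_tokens=200, only roughly 100 tokens remain for the prompt itself.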
@@ -212,8 +212,9 @@ def ask_ai(
         vector_store=SimpleVectorStore.from_persist_dir(persist_dir="./index"),
         index_store=SimpleIndexStore.from_persist_dir(persist_dir="./index"),
     )
-
-
+    if storage_context is not None:
+        index = load_index_from_storage(service_context=service_context, storage_context=storage_context)
+        response = query_llm(index, question, service_context)
 
     if response is not None:
         logging.info(f"Response: {response}")
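Note on this hunk: the new guard rebuilds the index from the persisted stores before querying; query_llm is this repo's own helper. Through the stock llama_index API of this era, the equivalent load-and-query path looks roughly like this sketch (the docstore line and the question string are assumptions added to make it self-contained):

# Sketch: component-wise reload of the persisted index, then a standard query.
from llama_index import StorageContext, load_index_from_storage
from llama_index.storage.docstore import SimpleDocumentStore
from llama_index.storage.index_store import SimpleIndexStore
from llama_index.vector_stores import SimpleVectorStore

storage_context = StorageContext.from_defaults(
    docstore=SimpleDocumentStore.from_persist_dir(persist_dir="./index"),  # assumed; not shown in the diff hunk
    vector_store=SimpleVectorStore.from_persist_dir(persist_dir="./index"),
    index_store=SimpleIndexStore.from_persist_dir(persist_dir="./index"),
)
index = load_index_from_storage(storage_context=storage_context)
response = index.as_query_engine().query("What does the document say?")  # illustrative question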
@@ -278,3 +279,5 @@ def search_construct(question, search_mode, index_select):
     print("\n".join(links))
     search_index_name = " ".join(search_terms.split(","))
     construct_index(links, search_index_name, "GPTVectorStoreIndex")
+    print(f"Index {search_index_name} constructed.")
+    return search_index_name + "_GPTVectorStoreIndex"
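Note on this hunk: search_construct now returns an identifier that combines the search terms with the index type. A hypothetical call site (variable names taken from the function signature, the rest assumed):

# Hypothetical consumer: the return value, e.g. "llama index_GPTVectorStoreIndex",
# identifies the index that was just built from the search links.
index_name = search_construct(question, search_mode, index_select)
print(f"Search results stored under index: {index_name}")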