remove unused code
app.py CHANGED
@@ -119,9 +119,8 @@ async def chat(query,history,audience,sources,reports):
 reports = []

 inputs = {"user_input": query,"audience": audience_prompt,"sources":sources}
-result = agent.astream_events(inputs,version = "v1")
-
-
+result = agent.astream_events(inputs,version = "v1")
+
 # path_reformulation = "/logs/reformulation/final_output"
 # path_keywords = "/logs/keywords/final_output"
 # path_retriever = "/logs/find_documents/final_output"
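The handler now streams exclusively through agent.astream_events(inputs, version = "v1"). With that API each yielded item is a dictionary carrying "event", "name" and "data" keys, which is exactly what the commented-out loop removed below used to inspect. A minimal consumer sketch under that assumption; stream_answer is a hypothetical helper (not part of app.py) and agent is the runnable built elsewhere in the file:

# Hypothetical helper, not app.py code: shows how the v1 event stream is consumed.
async def stream_answer(agent, inputs: dict) -> str:
    answer = ""
    async for event in agent.astream_events(inputs, version="v1"):
        if event["event"] == "on_chat_model_stream":
            # Token-level chunks emitted by the chat model.
            answer += event["data"]["chunk"].content
        elif event["event"] == "on_chain_end" and event["name"] == "retrieve_documents":
            # Final output of the retrieval node, same fields the removed loop read.
            docs = event["data"]["output"]["documents"]
            print(f"retrieved {len(docs)} documents")
    return answer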
@@ -187,84 +186,6 @@ async def chat(query,history,audience,sources,reports):

 yield history,docs_html,output_query,output_language,gallery #,output_query,output_keywords

-
-
-# if event["event"] == "on_chat_model_stream" and event["name"] != "transform_query": # if streaming answer
-# if start_streaming == False:
-# start_streaming = True
-# # history[-1] = (query,"")
-# history.append(ChatMessage(role="assistant", content = ""))
-# if type(history[-1].metadata) != dict :
-# # print("metadata : ", history[-1].metadata)
-# # history.append(ChatMessage(role="assistant", content = ""))
-
-# last_message_content = history[-1].content
-# last_message_content += event["data"]["chunk"].content
-# last_message_content = parse_output_llm_with_sources(last_message_content)
-# history[-1] = ChatMessage(role="assistant", content = last_message_content)
-# # new_token = event["data"]["chunk"].content
-# # # time.sleep(0.01)
-# # previous_answer = history[-1][1]
-# # previous_answer = previous_answer if previous_answer is not None else ""
-# # answer_yet = previous_answer + new_token
-# # answer_yet = parse_output_llm_with_sources(answer_yet)
-# # history[-1] = (query,answer_yet)
-
-
-# elif event["name"] == "retrieve_documents" and event["event"] == "on_chain_end": # when documents are retrieved
-# try:
-# docs = event["data"]["output"]["documents"]
-# docs_html = []
-# for i, d in enumerate(docs, 1):
-# docs_html.append(make_html_source(d, i))
-# docs_html = "".join(docs_html)
-# except Exception as e:
-# print(f"Error getting documents: {e}")
-# print(event)
-
-# # elif event["name"] == "retrieve_documents" and event["event"] == "on_chain_start":
-# # print(event)
-# # questions = event["data"]["input"]["questions"]
-# # questions = "\n".join([f"{i+1}. {q['question']} ({q['source']})" for i,q in enumerate(questions)])
-# # answer_yet = "🔄️ Searching in the knowledge base\n{questions}"
-# # history[-1] = (query,answer_yet)
-
-# # TODO append step de tool avec les questions qui sont utilisées pour la recherche
-# for event_name,(event_description,display_output) in steps_display.items(): # display steps
-# if event["name"] == event_name:
-# if event["event"] == "on_chain_start":
-# # answer_yet = f"<p><span class='loader'></span>{event_description}</p>"
-# # answer_yet = make_toolbox(event_description, "", checked = False)
-# answer_yet = event_description
-# # answer_yet = ChatMessage(role="assistant", content = "processing", metadata={'title' :event_description})
-
-# history.append(ChatMessage(role="assistant", content = "", metadata={'title' :event_description}))
-# # history[-1] = (query,answer_yet)
-# # elif event["event"] == "on_chain_end":
-# # answer_yet = ""
-# # history[-1] = (query,answer_yet)
-# # if display_output:
-# # print(event["data"]["output"])
-
-# # if op['path'] == path_reformulation: # reforulated question
-# # try:
-# # output_language = op['value']["language"] # str
-# # output_query = op["value"]["question"]
-# # except Exception as e:
-# # raise gr.Error(f"ClimateQ&A Error: {e} - The error has been noted, try another question and if the error remains, you can contact us :)")
-
-# # if op["path"] == path_keywords:
-# # try:
-# # output_keywords = op['value']["keywords"] # str
-# # output_keywords = " AND ".join(output_keywords)
-# # except Exception as e:
-# # pass
-
-
-
-# history = [tuple(x) for x in history]
-# yield history,docs_html,output_query,output_language,gallery,output_query,output_keywords
-
 except Exception as e:
 print(event, "has failed")
 raise gr.Error(f"{e}")
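The deleted block was an earlier draft of this event loop that still mixed the tuple-based chat history with ChatMessage objects. The one pattern worth noting from it is how intermediate steps were surfaced: appending a ChatMessage whose metadata "title" makes Gradio render a collapsible step entry in the chatbot. A small sketch of that pattern, assuming the steps_display mapping and event fields from the removed code; append_step is an illustrative name, not an app.py function:

from gradio import ChatMessage

def append_step(history: list, event: dict, steps_display: dict) -> list:
    # When a watched graph node starts, add an empty assistant message whose
    # metadata title is shown by gr.Chatbot(type="messages") as a collapsible step.
    for event_name, (event_description, _display_output) in steps_display.items():
        if event["name"] == event_name and event["event"] == "on_chain_start":
            history.append(ChatMessage(role="assistant", content="",
                                       metadata={"title": event_description}))
    return history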
@@ -327,23 +248,9 @@ async def chat(query,history,audience,sources,reports):
 history[-1] = (history[-1][0],answer_yet)
 history = [tuple(x) for x in history]

-# gallery = [x.metadata["image_path"] for x in docs if (len(x.metadata["image_path"]) > 0 and "IAS" in x.metadata["image_path"])]
-# if len(gallery) > 0:
-# gallery = list(set("|".join(gallery).split("|")))
-# gallery = [get_image_from_azure_blob_storage(x) for x in gallery]
-
 yield history,docs_html,output_query,output_language,gallery#,output_query,output_keywords


-
-# else:
-# docs_string = "No relevant passages found in the climate science reports (IPCC and IPBES)"
-# complete_response = "**No relevant passages found in the climate science reports (IPCC and IPBES), you may want to ask a more specific question (specifying your question on climate issues).**"
-# messages.append({"role": "assistant", "content": complete_response})
-# gradio_format = make_pairs([a["content"] for a in messages[1:]])
-# yield gradio_format, messages, docs_string
-
-
 def save_feedback(feed: str, user_id):
 if len(feed) > 1:
 timestamp = str(datetime.now().timestamp())
@@ -387,56 +294,6 @@ papers_cols_widths = {
 papers_cols = list(papers_cols_widths.keys())
 papers_cols_widths = list(papers_cols_widths.values())

-# async def find_papers(query, keywords,after):
-
-# summary = ""
-
-# df_works = oa.search(keywords,after = after)
-# df_works = df_works.dropna(subset=["abstract"])
-# df_works = oa.rerank(query,df_works,reranker)
-# df_works = df_works.sort_values("rerank_score",ascending=False)
-# G = oa.make_network(df_works)
-
-# height = "750px"
-# network = oa.show_network(G,color_by = "rerank_score",notebook=False,height = height)
-# network_html = network.generate_html()
-
-# network_html = network_html.replace("'", "\"")
-# css_to_inject = "<style>#mynetwork { border: none !important; } .card { border: none !important; }</style>"
-# network_html = network_html + css_to_inject
-
-
-# network_html = f"""<iframe style="width: 100%; height: {height};margin:0 auto" name="result" allow="midi; geolocation; microphone; camera;
-# display-capture; encrypted-media;" sandbox="allow-modals allow-forms
-# allow-scripts allow-same-origin allow-popups
-# allow-top-navigation-by-user-activation allow-downloads" allowfullscreen=""
-# allowpaymentrequest="" frameborder="0" srcdoc='{network_html}'></iframe>"""
-
-
-# docs = df_works["content"].head(15).tolist()
-
-# df_works = df_works.reset_index(drop = True).reset_index().rename(columns = {"index":"doc"})
-# df_works["doc"] = df_works["doc"] + 1
-# df_works = df_works[papers_cols]
-
-# yield df_works,network_html,summary
-
-# chain = make_rag_papers_chain(llm)
-# result = chain.astream_log({"question": query,"docs": docs,"language":"English"})
-# path_answer = "/logs/StrOutputParser/streamed_output/-"
-
-# async for op in result:
-
-# op = op.ops[0]
-
-# if op['path'] == path_answer: # reforulated question
-# new_token = op['value'] # str
-# summary += new_token
-# else:
-# continue
-# yield df_works,network_html,summary
-
-

 # --------------------------------------------------------------------
 # Gradio
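The removed find_papers coroutine searched and reranked papers through the app's oa helper and displayed the citation graph by inlining the pyvis-generated HTML into an iframe via srcdoc. A minimal sketch of just that embedding step, assuming network_html is the string returned by the network's generate_html(); wrap_network_html is an illustrative helper, not current app.py code:

def wrap_network_html(network_html: str, height: str = "750px") -> str:
    # srcdoc is delimited with single quotes below, so swap them out of the document.
    network_html = network_html.replace("'", '"')
    # Hide pyvis' default borders, as the removed code did.
    network_html += "<style>#mynetwork { border: none !important; } .card { border: none !important; }</style>"
    return (
        f'<iframe style="width: 100%; height: {height}; margin: 0 auto" '
        f'sandbox="allow-scripts allow-same-origin" frameborder="0" '
        f"srcdoc='{network_html}'></iframe>"
    )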
@@ -467,15 +324,12 @@ def vote(data: gr.LikeData):


 with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=theme,elem_id = "main-component") as demo:
-# user_id_state = gr.State([user_id])

 with gr.Tab("ClimateQ&A"):

 with gr.Row(elem_id="chatbot-row"):
 with gr.Column(scale=2):
-# state = gr.State([system_template])
 chatbot = gr.Chatbot(
-# value=[(None,init_prompt)],
 value = [ChatMessage(role="assistant", content=init_prompt)],
 type = "messages",
 show_copy_button=True,
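With the tuple-style seed removed, the chatbot is now configured entirely in Gradio's "messages" format and initialised with a ChatMessage. A standalone sketch of that configuration; the prompt text here is a placeholder, not the app's init_prompt:

import gradio as gr
from gradio import ChatMessage

init_prompt = "Hello, ask me anything about climate science."  # placeholder text

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(
        value=[ChatMessage(role="assistant", content=init_prompt)],
        type="messages",          # ChatMessage / dict entries instead of (user, bot) tuples
        show_copy_button=True,
        layout="panel",
    )

if __name__ == "__main__":
    demo.launch()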
@@ -483,7 +337,7 @@ with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=t
 elem_id="chatbot",
 layout = "panel",
 avatar_images = (None,"https://i.ibb.co/YNyd5W2/logo4.png"),
-)
+)

 # bot.like(vote,None,None)

@@ -491,8 +345,7 @@ with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=t

 with gr.Row(elem_id = "input-message"):
 textbox=gr.Textbox(placeholder="Ask me anything here!",show_label=False,scale=7,lines = 1,interactive = True,elem_id="input-textbox")
-
-
+

 with gr.Column(scale=1, variant="panel",elem_id = "right-panel"):

@@ -630,8 +483,6 @@ with gr.Blocks(title="Climate Q&A", css_paths=os.getcwd()+ "/style.css", theme=t

 dropdown_samples.change(change_sample_questions,dropdown_samples,samples)

-# query_papers.submit(generate_keywords,[query_papers], [keywords_papers])
-# search_papers.click(find_papers,[query_papers,keywords_papers,after], [papers_dataframe,citations_network,papers_summary])

 demo.queue()

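The last hunk unhooks the papers tab: the .submit() and .click() listeners that fed generate_keywords and the removed find_papers are gone. For reference, the general wiring pattern they used, with placeholder components and callbacks (none of these bodies reflect app.py):

import gradio as gr

def generate_keywords(query: str) -> str:
    # Placeholder: derive search keywords from the free-text question.
    return " AND ".join(query.split())

def find_papers(query: str, keywords: str) -> str:
    # Placeholder: run the paper search and return a summary string.
    return f"Searched for: {keywords}"

with gr.Blocks() as demo:
    query_papers = gr.Textbox(label="Question")
    keywords_papers = gr.Textbox(label="Keywords")
    search_papers = gr.Button("Search")
    papers_summary = gr.Markdown()

    # Pressing Enter in the question box fills the keywords; the button runs the search.
    query_papers.submit(generate_keywords, [query_papers], [keywords_papers])
    search_papers.click(find_papers, [query_papers, keywords_papers], [papers_summary])

if __name__ == "__main__":
    demo.launch()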