|
|
import gradio as gr |
|
|
from huggingface_hub import InferenceClient |
|
|
import os |
|
|
import requests |
|
|
|
|
|
# Endpoint of the external chat service (full URL), read from the environment.
CHAT_URL = os.getenv("CHAT_URL")

# Project identifier sent with every request to the chat service.
PROJECT_ID = os.getenv("PROJECT_ID")
|
|
|
|
|
def respond(
    message,
    history: list[dict[str, str]],
    system_message,
    token,
):
    """
    Send the user's message to the external chat service and return its reply.

    Parameters:
        message: the latest user input string.
        history: prior chat turns (unused here; the remote service keeps its
            own session state keyed by ``system_message``).
        system_message: reused as the remote service's session id.
        token: bearer token placed in the Authorization header.

    Returns:
        The ``results`` output variable from the service response, with
        literal backslash escapes (e.g. ``\\n``) expanded.

    Raises:
        requests.HTTPError: if the service answers with an error status.
        requests.Timeout: if the service does not answer within 60 seconds.

    For more information on `huggingface_hub` Inference API support, please
    check the docs:
    https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
    """
    # Every dialog variable is reset to an empty string on each turn; the
    # service fills them in and we only read back "results".
    update_variables = {
        name: ""
        for name in (
            "alfamidi", "alfagift", "karir", "layanan", "program",
            "voucher", "results", "identitas", "kerjasama",
        )
    }

    req = requests.post(
        CHAT_URL,
        json={
            "project_id": PROJECT_ID,
            "session_id": system_message,
            "user_input": message,
            "update_variables": update_variables,
            "output_variables": ["results"],
        },
        headers={"Authorization": f"Bearer {token}"},
        # Without a timeout a stalled service hangs the Gradio worker forever.
        timeout=60,
    )
    # Fail loudly with the HTTP status instead of a cryptic JSON/KeyError
    # when the service returns an error page.
    req.raise_for_status()

    out = req.json()["data"]["results"]

    print("[OUT]", out)

    # Expand literal escape sequences the service returns (e.g. "\\n").
    # NOTE(review): unicode_escape only round-trips latin-1 safely; non-ASCII
    # characters in `out` may be mangled — confirm the service's encoding.
    return out.encode('utf-8').decode('unicode_escape')
|
|
|
|
|
|
|
|
""" |
|
|
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface |
|
|
""" |
|
|
chatbot = gr.ChatInterface( |
|
|
respond, |
|
|
type="messages", |
|
|
additional_inputs=[ |
|
|
gr.Textbox(value="<TEST_123>", label="session_id"), |
|
|
gr.Textbox(value="[TOKEN]", label="token"), |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
], |
|
|
) |
|
|
|
|
|
# Wrap the chat interface in a Blocks layout so an extra HTML caption can be
# rendered alongside it.
with gr.Blocks() as demo:
    chatbot.render()
    title = gr.HTML("<h3>Use #ai to ask the ai</h3>")


# Launch the app only when executed as a script (not when imported).
if __name__ == "__main__":
    demo.launch()
|
|
|