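"""Gradio Space that loads a model's tokenizer and renders a conversation through its chat template."""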
import gradio as gr
from transformers import AutoTokenizer
import json
import os
from huggingface_hub import login

# Optional HF token read from the environment; used as a fallback when none is entered in the UI.
HUGGINGFACEHUB_API_TOKEN = os.environ.get("HF_TOKEN", "")
default_model = "Qwen/Qwen3-30B-A3B-Instruct-2507"
demo_conversation = """[
    {"role": "system", "content": "You are a professional customer service agent. Always remain polite, clear, and helpful."},
    {"role": "user", "content": "Hi there!"},
    {"role": "assistant", "content": "Hello! Nice to meet you! 😊"},
    {"role": "user", "content": "Can I ask a question?"}
]"""
description_text = """# Chat Template Viewer
### This space helps visualize chat formatting using Hugging Face models.
"""
default_tools = []

def apply_chat_template(model_name, test_conversation, add_generation_prompt, cleanup_whitespace, hf_token, tools):
    # Authenticate (UI token first, then the environment fallback) and load the tokenizer.
    try:
        if hf_token or HUGGINGFACEHUB_API_TOKEN:
            login(token=hf_token or HUGGINGFACEHUB_API_TOKEN)
        try:
            tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
        except Exception:
            # Some models only ship a slow tokenizer; retry without the fast implementation.
            tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True, use_fast=False)
            print("use_fast=False")
    except Exception as e:
        return f"Error: Could not load model {model_name} or invalid HF token. {str(e)}"

    # Parse the conversation JSON and render it through the model's chat template.
    # Note: cleanup_whitespace is accepted from the UI but not currently applied here.
    try:
        conversation = json.loads(test_conversation)
        formatted = tokenizer.apply_chat_template(
            conversation,
            tokenize=False,
            add_generation_prompt=add_generation_prompt,
            tools=tools or None,  # pass an empty tool list as None so it does not alter the template
        )
        return formatted
    except Exception as e:
        return f"Error: {str(e)}"
with gr.Blocks() as demo:
    gr.Markdown(description_text)
    with gr.Row():
        with gr.Column():
            model_name_input = gr.Textbox(label="Model Name", placeholder="Enter model name", value=default_model)
            hf_token_input = gr.Textbox(label="Hugging Face Token (optional)", placeholder="Enter your HF token", type="password")
            conversation_input = gr.TextArea(value=demo_conversation, lines=8, label="Conversation")
            add_generation_prompt_checkbox = gr.Checkbox(value=False, label="Add generation prompt")
            cleanup_whitespace_checkbox = gr.Checkbox(value=True, label="Cleanup template whitespace")
            format_button = gr.Button("Format Conversation")
        with gr.Column():
            output = gr.TextArea(label="Formatted Conversation", interactive=False, lines=12)

    # Use gr.State() to pass default_tools correctly
    tools_state = gr.State(default_tools)

    format_button.click(
        fn=apply_chat_template,
        inputs=[
            model_name_input,
            conversation_input,
            add_generation_prompt_checkbox,
            cleanup_whitespace_checkbox,
            hf_token_input,
            tools_state,  # Wrapped inside gr.State()
        ],
        outputs=output,
    )

demo.launch()