# Claude
# Refactor app.py to replace Streamlit with Gradio for the Chinese text
# generation demo; update requirements.txt to include Gradio.
# 1b5a324 unverified
import gradio as gr
from transformers import pipeline
import os
def load_generator():
    """Build the Hugging Face text-generation pipeline for the Qwen model.

    Reads ``HUGGINGFACEHUB_API_TOKEN`` from the environment (may be ``None``)
    to authenticate with the Hugging Face Hub when downloading the model.
    """
    hub_token = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
    return pipeline(
        "text-generation",
        model="Qwen/Qwen3-4B-Instruct-2507",
        token=hub_token,
    )

# Load the model once at import time so every request reuses the same pipeline.
generator = load_generator()
def generate_text(prompt):
    """Generate Chinese text for *prompt* using the module-level pipeline.

    Returns a friendly message for ``None``, empty, or whitespace-only input
    instead of invoking the model (the original only caught falsy input, so a
    prompt of all spaces was sent to the model).
    """
    # Guard: reject None / "" / whitespace-only prompts before touching the model.
    if not prompt or not prompt.strip():
        return "Please enter a prompt."
    # `prefix` is prepended to the prompt so the model responds in Simplified Chinese.
    result = generator(prompt, max_new_tokens=500, prefix="用简体中文回应。")
    return result[0]["generated_text"]
# Example prompts shown under the input box for one-click trials.
_EXAMPLE_PROMPTS = (
    "寫一篇有關製作中式點心的說明文。",
    "寫一篇有關學校的描寫文。",
    "寫一篇有關與家人旅行的記敘文。",
    "寫一篇題為《地鐵眾生相》的描寫文。",
)
# gr.Examples expects one row per example, hence the list-of-lists shape.
examples = [[prompt] for prompt in _EXAMPLE_PROMPTS]
# --- Gradio UI -----------------------------------------------------------
# Two-column layout: prompt entry on the left, generated output on the right.
with gr.Blocks(title="Chinese Text Generation Demo", theme=gr.themes.Soft()) as demo:
    gr.Markdown("# Chinese Text Generation Demo")
    gr.Markdown("Generate Chinese text using Qwen language model")

    with gr.Row():
        with gr.Column(scale=1):
            prompt_box = gr.Textbox(
                label="Enter your prompt",
                placeholder="Enter your Chinese text prompt here...",
                lines=4,
                max_lines=8,
            )
            run_button = gr.Button("Generate", variant="primary", size="lg")
        with gr.Column(scale=2):
            result_box = gr.Textbox(
                label="Generated Text",
                lines=12,
                interactive=False,
                show_copy_button=True,
            )

    # Wire the button to the generation function.
    run_button.click(
        fn=generate_text,
        inputs=prompt_box,
        outputs=result_box,
        show_progress=True,
    )

    # Clickable example prompts; outputs are precomputed via cache_examples.
    gr.Examples(
        examples=examples,
        inputs=prompt_box,
        outputs=result_box,
        fn=generate_text,
        cache_examples=True,
        examples_per_page=4,
    )
if __name__ == "__main__":
    # Enable request queuing before starting the web server.
    app = demo.queue()
    app.launch()