ThieLin committed
Commit fe010cc · verified · 1 Parent(s): e9199db
Files changed (1)
  1. app.py +14 -16
app.py CHANGED
@@ -2,29 +2,27 @@ import gradio as gr
 from transformers import pipeline
 from sentence_transformers import SentenceTransformer, util
 
-# Carregamento dos modelos
-model_a = pipeline("text-generation", model="tiiuae/falcon-7b-instruct")
-model_b = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.1")
+generator_a = pipeline("text-generation", model="gpt2")
+generator_b = pipeline("text2text-generation", model="google/flan-t5-base")
 similarity_model = SentenceTransformer("sentence-transformers/paraphrase-MiniLM-L6-v2")
 
-def comparar_respostas(prompt):
-    resp_a = model_a(prompt, max_new_tokens=80)[0]["generated_text"]
-    resp_b = model_b(prompt, max_new_tokens=80)[0]["generated_text"]
+def comparar(prompt):
+    resp_a = generator_a(prompt, max_new_tokens=60, temperature=0.7)[0]["generated_text"]
+    resp_b = generator_b(prompt, max_new_tokens=60, temperature=0.7)[0]["generated_text"]
+
     emb_a = similarity_model.encode(resp_a, convert_to_tensor=True)
     emb_b = similarity_model.encode(resp_b, convert_to_tensor=True)
     similaridade = util.cos_sim(emb_a, emb_b).item()
 
     return resp_a.strip(), resp_b.strip(), f"{similaridade:.4f}"
 
-interface = gr.Interface(
-    fn=comparar_respostas,
-    inputs=gr.Textbox(label="Digite seu prompt"),
+gr.Interface(
+    fn=comparar,
+    inputs=gr.Textbox(label="Digite um prompt"),
     outputs=[
-        gr.Textbox(label="Resposta do Modelo A (Falcon)"),
-        gr.Textbox(label="Resposta do Modelo B (Mistral)"),
-        gr.Textbox(label="Similaridade entre as respostas")
+        gr.Textbox(label="Resposta do GPT-2"),
+        gr.Textbox(label="Resposta do Flan-T5"),
+        gr.Textbox(label="Similaridade entre respostas")
     ],
-    title="Comparador de Modelos LLM - Hugging Face"
-)
-
-interface.launch()
+    title="Comparador de Modelos LLM Leves"
+).launch()
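
For reference, the similarity value returned as the third output of comparar is a plain cosine similarity between MiniLM sentence embeddings. A minimal standalone sketch of that step, assuming the same sentence-transformers model as the diff (illustrative only, not part of this commit; the two response strings are hypothetical stand-ins for the pipeline outputs):

from sentence_transformers import SentenceTransformer, util

similarity_model = SentenceTransformer("sentence-transformers/paraphrase-MiniLM-L6-v2")

# Two hypothetical model responses, standing in for resp_a and resp_b above
resp_a = "Paris is the capital of France."
resp_b = "The capital city of France is Paris."

emb_a = similarity_model.encode(resp_a, convert_to_tensor=True)
emb_b = similarity_model.encode(resp_b, convert_to_tensor=True)
print(f"{util.cos_sim(emb_a, emb_b).item():.4f}")  # cosine similarity; values near 1.0 mean near-identical meaning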