SRI2005 committed on
Commit
dc4b3bc
·
verified ·
1 Parent(s): 5cbcc7b

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +56 -0
app.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from transformers import AutoTokenizer, AutoModelForCausalLM
3
+ from PyPDF2 import PdfReader
4
+ import torch
5
+
6
# Load IBM Granite model (use a smaller one if needed, e.g., granite-3.0-3b-instruct)
# device_map="auto" lets accelerate place the weights on GPU/CPU as available.
# NOTE(review): loading an 8B model happens at import time — first launch is slow.
model_name = "ibm-granite/granite-3.0-8b-instruct"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto")

# Global variable for PDF context — written by upload_pdf(), read by chat().
# Empty string means no PDF has been uploaded yet.
pdf_context = ""
13
+
14
def upload_pdf(file):
    """Extract text from an uploaded PDF into the global ``pdf_context``.

    Parameters:
        file: Path or file object supplied by the Gradio File component,
            or None when nothing was uploaded.

    Returns:
        A human-readable status string shown in the UI.
    """
    global pdf_context
    if file is None:
        return "No file uploaded."
    reader = PdfReader(file)
    # extract_text() may return None for pages with no extractable text
    # (e.g. scanned images) — coalesce to "" so concatenation never raises.
    # Build via join instead of repeated += (avoids quadratic string copies).
    pdf_context = "".join(
        (page.extract_text() or "") + "\n" for page in reader.pages
    )
    return "PDF uploaded and text extracted successfully!"
23
+
24
def chat(message, history):
    """Answer a user question, grounded in the uploaded PDF text.

    Parameters:
        message: The latest user message (str).
        history: List of (user_msg, assistant_msg) tuples from Gradio's
            ChatInterface (tuple-style history).

    Returns:
        The model's generated reply as a string.
    """
    # System prompt injects the extracted PDF text so answers stay grounded.
    messages = [{"role": "system", "content": f"You are a helpful assistant. Answer based on this context: {pdf_context}"}]
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    # Apply chat template and generate.
    input_text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
    inputs = tokenizer(input_text, return_tensors="pt").to(model.device)
    # do_sample=True is required for temperature to have any effect —
    # without it generate() greedy-decodes and silently ignores temperature.
    # no_grad() skips autograd bookkeeping during inference.
    with torch.no_grad():
        outputs = model.generate(**inputs, max_new_tokens=300, temperature=0.7, do_sample=True)
    # Slice off the prompt tokens so only the newly generated text is decoded.
    prompt_len = inputs["input_ids"].shape[1]
    response = tokenizer.decode(outputs[0][prompt_len:], skip_special_tokens=True)
    return response
38
+
39
# Gradio interface: a Blocks layout with a PDF upload row, a status box,
# and a chat panel wired to chat() above.
with gr.Blocks() as demo:
    gr.Markdown("# Basic PDF Q&A Chat with IBM Granite")

    with gr.Row():
        # File picker restricted to PDFs; upload button triggers extraction.
        pdf_input = gr.File(label="Upload PDF", file_types=[".pdf"])
        upload_btn = gr.Button("Upload PDF")

    # Shows the success / "no file" message returned by upload_pdf().
    status = gr.Textbox(label="Status")

    chat_interface = gr.ChatInterface(
        fn=chat,
        title="Ask questions about the PDF"
    )

    # Clicking the button runs upload_pdf(pdf_input) and writes into status.
    upload_btn.click(upload_pdf, inputs=pdf_input, outputs=status)

demo.launch()