# test/app.py — Gradio text-generation demo for HuatuoGPT-Vision-7B
# (Hugging Face Spaces app; commit 9c3145b)
import gradio as gr
from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
# Load the model and tokenizer once at import time, then wrap them in a
# text-generation pipeline used by the Gradio handler below.
# NOTE(review): HuatuoGPT-Vision-7B appears to be a multimodal (vision)
# checkpoint; `AutoModelForCausalLM.from_pretrained` may require
# `trust_remote_code=True` to load its custom architecture — confirm.
# NOTE(review): no `torch_dtype`/`device_map` is given, so this loads the
# full 7B model in default precision on CPU — likely slow/OOM on a small
# Space; verify the intended hardware.
model_name = "FreedomIntelligence/HuatuoGPT-Vision-7B"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
# Function to generate a response using the model
def generate_response(user_input, generator=None):
    """Generate a model reply for a single user message.

    Args:
        user_input: The user's prompt text from the Gradio textbox.
        generator: Optional text-generation callable with the transformers
            pipeline interface. Defaults to the module-level ``pipe``;
            exposed mainly for testing.

    Returns:
        The assistant's reply as a string.
    """
    gen = pipe if generator is None else generator
    messages = [{"role": "user", "content": user_input}]
    generated = gen(messages)[0]["generated_text"]
    # BUG FIX: with chat-style (list-of-messages) input, the pipeline's
    # 'generated_text' is the whole conversation — a list of role/content
    # dicts ending with the assistant's turn — not a plain string. The
    # original returned that raw list (user message included) to the UI.
    if isinstance(generated, list):
        return generated[-1]["content"]
    # Plain-prompt pipelines return a string; pass it through unchanged.
    return generated
# Build the Gradio UI: one text input, one text output, wired to the
# generation handler above.
_interface_config = {
    "fn": generate_response,  # handler invoked on each submission
    "inputs": "text",  # single free-form text field
    "outputs": "text",  # plain-text response display
    "title": "HuatuoGPT-Vision-7B",
    "description": "A text generation model powered by HuatuoGPT-Vision-7B. Ask anything!",
}
iface = gr.Interface(**_interface_config)

# Start the web server (blocks until the app is stopped).
iface.launch()