import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from peft import PeftModel
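# Dependencies (assumed to be listed in the Space's requirements.txt):
# gradio, transformers, peft, and torch (needed by transformers to load the model).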
# Global cache for the pipeline
pipe = None
def load_model():
    """Load the TinyLlama model with LoRA adapters (cached)."""
    global pipe
    if pipe is None:
        base_model = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
        adapter_model = "Noshitha98/TinyLlama-ToS-Finetuned"
        tokenizer = AutoTokenizer.from_pretrained(base_model)
        model = AutoModelForCausalLM.from_pretrained(base_model)
        model = PeftModel.from_pretrained(model, adapter_model)
        pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
    return pipe

def check_clause(text):
    """Highlight potentially risky sentences in the supplied contract text."""
    # Validate input before loading the (large) model.
    if not text.strip():
        return [("⚠️ Please enter contract text.", None)]
    generator = load_model()
    prompt = f"Detect unusual or risky clauses in this contract:\n\n{text}\n\nResponse:"
    output = generator(prompt, max_length=512, do_sample=False)[0]["generated_text"]
    # Keep only the model's answer, dropping the echoed prompt.
    response = output.split("Response:")[-1].strip()
    # Simple heuristic: flag sentences containing risk-related keywords.
    # (The model response above is generated but not currently shown in the highlighted output.)
    risky_keywords = ["penalty", "terminate", "discretion", "risk", "sole", "modify", "liability"]
    highlights = []
    for sentence in text.split(". "):
        label = "risky" if any(word in sentence.lower() for word in risky_keywords) else None
        highlights.append((sentence.strip(), label))
    return highlights
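# Example (for a quick local sanity check, outside the UI; this will load the model):
# check_clause("The provider may modify these terms at its sole discretion.")
# -> returns [("The provider may modify these terms at its sole discretion.", "risky")]
#    because the sentence contains the keywords "modify", "sole", and "discretion".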

# Gradio UI with highlighted output
iface = gr.Interface(
    fn=check_clause,
    inputs=gr.Textbox(lines=8, placeholder="Paste contract clause here..."),
    outputs=gr.HighlightedText(color_map={"risky": "red"}),
    title="Contract Clause Checker",
    description="Paste contract text to detect unusual or anomalous clauses using TinyLlama-ToS-Finetuned.",
)

iface.launch()