# bug-fixer / bug_detector.py
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
import torch
# Load the pretrained CodeT5 checkpoint and its tokenizer (another seq2seq model could be substituted)
model_name = "Salesforce/codet5-base"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
def fix_code(code: str) -> str:
    """Generate a suggested fix for the given code snippet."""
    # Prefix the snippet with a "fix:" instruction to form the model prompt
    input_text = f"fix: {code}"
    # Tokenize, truncating long inputs to the 512-token limit
    inputs = tokenizer(input_text, return_tensors="pt", padding=True, truncation=True, max_length=512)
    # Generate the candidate fix without tracking gradients
    with torch.no_grad():
        output = model.generate(**inputs, max_length=512)
    return tokenizer.decode(output[0], skip_special_tokens=True)
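
# Usage sketch (not part of the original file): run the module directly to see fix_code in action.
# The buggy snippet below is purely illustrative.
if __name__ == "__main__":
    buggy_snippet = "def add(a, b):\n    return a - b"  # hypothetical buggy input
    print("Original code:\n", buggy_snippet)
    print("Suggested fix:\n", fix_code(buggy_snippet))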