Spaces:
Runtime error
Runtime error
File size: 1,609 Bytes
055d938 304c5a2 350dfeb 304c5a2 350dfeb 304c5a2 350dfeb 304c5a2 350dfeb 304c5a2 350dfeb 304c5a2 350dfeb 304c5a2 350dfeb 304c5a2 350dfeb 86f2f38 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 |
import gradio as gr
from bug_detector import fix_code
def run_bugfixer(code):
    """Forward the user's pasted code to the CodeT5+ fixer and return its suggestion."""
    suggestion = fix_code(code)
    return suggestion
# Assemble the Gradio UI, then serve it.
demo = gr.Interface(
    fn=run_bugfixer,
    inputs=gr.Textbox(label="Paste Buggy Code", lines=15),
    outputs=gr.Textbox(label="Suggested Fixed Code"),
    title="Code Fixer using CodeT5+",
)
demo.launch()
from flask import Flask, request, jsonify
from flask_cors import CORS # Fix CORS issue
import torch
from transformers import RobertaTokenizer, RobertaForSequenceClassification
# Flask application object; the routes below attach to it.
app = Flask(__name__)
CORS(app)  # Enable CORS
# Load CodeBERT model
# NOTE(review): "microsoft/codebert-base" is a base checkpoint with no
# fine-tuned classification head, so RobertaForSequenceClassification will
# initialize the head randomly — /detect predictions are likely meaningless
# until a fine-tuned checkpoint is substituted; confirm against deployment.
tokenizer = RobertaTokenizer.from_pretrained("microsoft/codebert-base")
model = RobertaForSequenceClassification.from_pretrained("microsoft/codebert-base")
@app.route("/")
def home():
    """Health-check endpoint confirming the service is reachable."""
    banner = "Bug Detection and Fixing API is running!"
    return banner
@app.route("/detect", methods=["POST"])
def detect_bug():
    """Classify POSTed source code as "buggy" or "clean".

    Expects a JSON body {"code": "<source code>"}. Returns
    {"status": "buggy"|"clean"} on success, {"error": ...} with 400 for a
    missing/empty code field or invalid JSON, and 500 on unexpected failure.
    """
    try:
        # silent=True makes get_json() return None instead of raising on a
        # missing or malformed JSON body, so bad requests get a proper 400
        # rather than falling into the 500 handler below.
        data = request.get_json(silent=True) or {}
        code = data.get("code", "")
        if not code:
            return jsonify({"error": "No code provided"}), 400
        # Tokenize and classify. no_grad() disables autograd bookkeeping —
        # this is inference only, so it saves memory and time.
        inputs = tokenizer(code, return_tensors="pt", truncation=True, padding=True)
        with torch.no_grad():
            outputs = model(input_ids=inputs["input_ids"], attention_mask=inputs["attention_mask"])
        prediction = torch.argmax(outputs.logits, dim=1).item()
        # NOTE(review): label 1 == "buggy" is an assumption — the base
        # checkpoint's classification head is untrained; verify the label
        # mapping against the checkpoint actually deployed.
        bug_status = "buggy" if prediction == 1 else "clean"
        return jsonify({"status": bug_status})
    except Exception as e:
        # API boundary: surface any unexpected failure as JSON with a 500.
        return jsonify({"error": str(e)}), 500  # Handle errors properly
if __name__ == "__main__":
    # Bind to all interfaces so the container's port mapping can reach the app.
    app.run(host="0.0.0.0", port=5000)  # Ensure compatibility with Docker
|