venky2k1 committed · Commit d3e257b · 1 Parent(s): 819eb9b
Initial commit for deployment

Files changed:
- Dockerfile +0 -17
- __pycache__/bug_detector.cpython-313.pyc +0 -0
- app.py +8 -26
- app_ui.py +18 -14
- bug_detector.py +4 -5

Dockerfile
DELETED
@@ -1,17 +0,0 @@
-# Use Python as the base image
-FROM python:3.9
-
-# Set working directory
-WORKDIR /app
-
-# Copy project files
-COPY . /app
-
-# Install dependencies
-RUN pip install flask torch transformers
-
-# Expose the API port
-EXPOSE 5000
-
-# Start Flask server
-CMD ["python", "app.py"]
__pycache__/bug_detector.cpython-313.pyc
CHANGED
Binary files a/__pycache__/bug_detector.cpython-313.pyc and b/__pycache__/bug_detector.cpython-313.pyc differ

app.py
CHANGED
@@ -1,52 +1,34 @@
-import
-from
-
-def run_bugfixer(code):
-    return fix_code(code)
-
-gr.Interface(
-    fn=run_bugfixer,
-    inputs=gr.Textbox(label="Paste Buggy Code", lines=15),
-    outputs=gr.Textbox(label="Suggested Fixed Code"),
-    title="Automated BUg Fixing and Dection"
-).launch()
-from flask import Flask, request, jsonify
-from flask_cors import CORS  # Fix CORS issue
+from flask import Flask, request, jsonify
+from flask_cors import CORS
 import torch
 from transformers import RobertaTokenizer, RobertaForSequenceClassification
 
 app = Flask(__name__)
 CORS(app)
 
-# Load CodeBERT model
 tokenizer = RobertaTokenizer.from_pretrained("microsoft/codebert-base")
 model = RobertaForSequenceClassification.from_pretrained("microsoft/codebert-base")
 
 @app.route("/")
 def home():
-    return "Bug Detection and Fixing API is
+    return "✅ Bug Detection and Fixing API is Running!"
 
 @app.route("/detect", methods=["POST"])
 def detect_bug():
     try:
         data = request.get_json()
         code = data.get("code", "")
         if not code:
             return jsonify({"error": "No code provided"}), 400
 
-        # Tokenize and classify
         inputs = tokenizer(code, return_tensors="pt", truncation=True, padding=True)
         outputs = model(input_ids=inputs["input_ids"], attention_mask=inputs["attention_mask"])
         prediction = torch.argmax(outputs.logits, dim=1).item()
 
-        bug_status = "buggy" if prediction == 1 else "clean"
-        return jsonify({"status": bug_status})
+        status = "🐞 Buggy" if prediction == 1 else "✅ Clean"
+        return jsonify({"status": status})
     except Exception as e:
         return jsonify({"error": str(e)}), 500
 
 if __name__ == "__main__":
     app.run(host="0.0.0.0", port=5000)
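
One caveat about /detect: "microsoft/codebert-base" ships as a pre-trained encoder with no fine-tuned classification head, so RobertaForSequenceClassification attaches a randomly initialized head and the 0/1 prediction is only meaningful after fine-tuning on labeled buggy/clean data. A minimal standalone sketch of the same classification step, with an illustrative input:

# Standalone sketch of the classification step behind /detect.
# The sample snippet is illustrative; the head is untrained (see note above).
import torch
from transformers import RobertaTokenizer, RobertaForSequenceClassification

tokenizer = RobertaTokenizer.from_pretrained("microsoft/codebert-base")
model = RobertaForSequenceClassification.from_pretrained("microsoft/codebert-base")

code = "def add(a, b):\n    return a - b"
inputs = tokenizer(code, return_tensors="pt", truncation=True, padding=True)
with torch.no_grad():
    outputs = model(**inputs)
prediction = torch.argmax(outputs.logits, dim=1).item()
print("🐞 Buggy" if prediction == 1 else "✅ Clean")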

app_ui.py
CHANGED
@@ -1,18 +1,22 @@
 import gradio as gr
-from
+from bug_detector import fix_code
 
-def
+def run_bugfixer(code):
     fixed = fix_code(code)
-    return
+    return fixed
 
+gr.Interface(
+    fn=run_bugfixer,
+    inputs=gr.Textbox(label="📥 Paste Buggy Code", lines=15),
+    outputs=gr.Textbox(label="🛠️ Suggested Fixed Code", lines=15, interactive=True),  # ✅ EDITABLE OUTPUT
+    title="Automated Bug Fixing and Detection",
+    description="""
+    <h3 style='color: orange;'>Developed by:</h3>
+    <ul>
+      <li><strong>Akanksha K P</strong></li>
+      <li><strong>Divyashree</strong></li>
+    </ul>
+    <h4 style='color: cyan;'>Guide: Prof. Alakananda K P</h4>
+    """,
+    theme="soft"
+).launch()
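
Since the UI is only a thin wrapper around fix_code, the callback can be exercised without starting Gradio at all; the buggy input below is illustrative:

# Quick check of the UI's callback logic without launching the server.
# Goes through bug_detector directly, because importing app_ui would
# call .launch() at module import time.
from bug_detector import fix_code

print(fix_code("def add(a, b):\n    return a - b"))  # illustrative buggy input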

bug_detector.py
CHANGED
@@ -1,14 +1,13 @@
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
 import torch
 
-# Load CodeT5 or similar model
 model_name = "Salesforce/codet5-base"
 tokenizer = AutoTokenizer.from_pretrained(model_name)
 model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
 
-def fix_code(code
-
-    inputs = tokenizer(
+def fix_code(code):
+    prompt = f"fix Python: {code}"
+    inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True, max_length=512)
     with torch.no_grad():
-        output = model.generate(**inputs, max_length=512)
+        output = model.generate(**inputs, max_length=512, num_beams=4, early_stopping=True)
     return tokenizer.decode(output[0], skip_special_tokens=True)
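
The new generate call trades speed for quality: num_beams=4 keeps four candidate sequences at each decoding step, and early_stopping=True stops the search as soon as that many complete candidates exist. A hedged usage sketch; the input is illustrative, and since the base CodeT5 checkpoint is not fine-tuned for code repair, the output should be read as a suggestion rather than a guaranteed fix:

# Illustrative call into the helper above; output quality depends on the
# checkpoint ("Salesforce/codet5-base" is pre-trained, not repair-tuned).
from bug_detector import fix_code

buggy = "def divide(a, b):\n    return a / b  # no zero check"
print(fix_code(buggy))  # prints the model's suggested rewrite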