removed encoding and sentence models
- app.py +35 -35
- library.ipynb +1 -1
app.py
CHANGED
@@ -1,6 +1,6 @@
 from flask import Flask,request,render_template,send_file,jsonify
 import os
-from transformers import AutoTokenizer, AutoModel
+# from transformers import AutoTokenizer, AutoModel
 import anvil.server
 import pathlib
 import textwrap
@@ -13,15 +13,15 @@ from keys_min import server_uplink
 print(server_uplink)
 anvil.server.connect(server_uplink)
 
-from sentence_transformers import SentenceTransformer
-from sentence_transformers.util import cos_sim
+# from sentence_transformers import SentenceTransformer
+# from sentence_transformers.util import cos_sim
 # model = SentenceTransformer('thenlper/gte-large')
-model = SentenceTransformer('BAAI/bge-large-en')
+# model = SentenceTransformer('BAAI/bge-large-en')
 
-@anvil.server.callable
-def encode(sentence = None):
-    vec = model.encode(sentence)
-    return [float(val) if isinstance(val, (int, float, np.float32)) else 0.0 for val in vec]
+# @anvil.server.callable
+# def encode(sentence = None):
+#     vec = model.encode(sentence)
+#     return [float(val) if isinstance(val, (int, float, np.float32)) else 0.0 for val in vec]
 
 app=Flask(__name__)
 MESSAGED={'title':'API Server for ICAPP',
@@ -29,8 +29,8 @@ MESSAGED={'title':'API Server for ICAPP',
 'call_gemini(text,key)','call_gpt(text,key,model)',
 'task_id<=launch(func_name,*args)','poll(task_id)']}
 
-tokenizer = AutoTokenizer.from_pretrained('allenai/specter')
-encoder = AutoModel.from_pretrained('allenai/specter')
+# tokenizer = AutoTokenizer.from_pretrained('allenai/specter')
+# encoder = AutoModel.from_pretrained('allenai/specter')
 
 anvil.server.callable(call_gpt)
 anvil.server.callable(call_gemini)
@@ -64,15 +64,15 @@ def poll(task_id):
 return str(result)
 else: return str(result)
 
-@anvil.server.callable
-def encode_anvil(text):
-    inputs = tokenizer(text, padding=True, truncation=True,
-                       return_tensors="pt", max_length=512)
-    result = encoder(**inputs)
-    embeddings = result.last_hidden_state[:, 0, :]
-    emb_array = embeddings.detach().numpy()
-    embedding=emb_array.tolist()
-    return embedding
+# @anvil.server.callable
+# def encode_anvil(text):
+#     inputs = tokenizer(text, padding=True, truncation=True,
+#                        return_tensors="pt", max_length=512)
+#     result = encoder(**inputs)
+#     embeddings = result.last_hidden_state[:, 0, :]
+#     emb_array = embeddings.detach().numpy()
+#     embedding=emb_array.tolist()
+#     return embedding
 
 @anvil.server.callable
 def reset_service():
@@ -86,22 +86,22 @@ def print_results_table():
 global service
 return(service.results)
 
-@app.route('/encode',methods=['GET','POST'])
-def encode():
-    print(request)
-    if request.method=='GET':
-        text=request.args.get('text')
-    elif request.method=='POST':
-        data=request.get_json()
-        if 'text' in data: text=data["text"]
-    if text=='' or text is None: return -1
-    inputs = tokenizer(text, padding=True, truncation=True,
-                       return_tensors="pt", max_length=512)
-    result = encoder(**inputs)
-    embeddings = result.last_hidden_state[:, 0, :]
-    emb_array = embeddings.detach().numpy()
-    embedding=emb_array.tolist()
-    return jsonify({'embedding': embedding})
+# @app.route('/encode',methods=['GET','POST'])
+# def encode():
+#     print(request)
+#     if request.method=='GET':
+#         text=request.args.get('text')
+#     elif request.method=='POST':
+#         data=request.get_json()
+#         if 'text' in data: text=data["text"]
+#     if text=='' or text is None: return -1
+#     inputs = tokenizer(text, padding=True, truncation=True,
+#                        return_tensors="pt", max_length=512)
+#     result = encoder(**inputs)
+#     embeddings = result.last_hidden_state[:, 0, :]
+#     emb_array = embeddings.detach().numpy()
+#     embedding=emb_array.tolist()
+#     return jsonify({'embedding': embedding})
 
 @app.route('/',methods=['GET', 'POST'])
 def home():
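For reference, the SPECTER-based encoding path removed here survives only as comments in app.py. The following is a minimal standalone sketch of that logic, assuming transformers and torch are installed; the allenai/specter checkpoint name and tensor handling are taken from the diff, while the function name encode_text is illustrative and not part of the app.

from transformers import AutoTokenizer, AutoModel

# Same checkpoint as the commented-out tokenizer/encoder globals in app.py
tokenizer = AutoTokenizer.from_pretrained('allenai/specter')
encoder = AutoModel.from_pretrained('allenai/specter')

def encode_text(text):
    # Tokenize, run the encoder, and keep the [CLS] token embedding,
    # mirroring the removed encode_anvil callable and /encode route
    inputs = tokenizer(text, padding=True, truncation=True,
                       return_tensors="pt", max_length=512)
    result = encoder(**inputs)
    embeddings = result.last_hidden_state[:, 0, :]
    return embeddings.detach().numpy().tolist()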
library.ipynb
CHANGED
@@ -73,7 +73,7 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "
+"display_name": "py310exp",
 "language": "python",
 "name": "python3"
 },