Bhanushray committed
Commit 12abd70 · verified · 1 Parent(s): f829b9d

Upload 2 files

Files changed (2):
  1. Dockerfile +1 -1
  2. app.py +26 -23
Dockerfile CHANGED
@@ -13,7 +13,7 @@ COPY requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
 
 # Create necessary directories (but don't download models here!)
-RUN mkdir -p /app/modelsBioembed /app/models_folder /app/Samples
+RUN mkdir -p /app/modelsBioembed /app/models_folder /app/Samples /app/numba_cache /app/hf_cache
 
 # Copy the entire project to the container
 COPY . .
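
The two new cache directories pair with environment variables that app.py sets in the next file: NUMBA_CACHE_DIR tells Numba where to write its JIT cache and TRANSFORMERS_CACHE tells Hugging Face Transformers where to keep its download cache, and both need a writable path inside the container. A minimal sketch of the ordering that matters, assuming transformers (and whatever dependency pulls in numba) are among app.py's imports:

    import os

    # Point the library caches at writable directories baked into the image.
    # Set these before importing the libraries that read them.
    os.environ["NUMBA_CACHE_DIR"] = "/app/numba_cache"
    os.environ["TRANSFORMERS_CACHE"] = "/app/hf_cache"

    import transformers  # noqa: E402  (imported after the env vars on purpose)

If the container runs as a non-root user, the RUN mkdir may also need a matching chmod/chown so the directories stay writable; that is an assumption about the runtime, not something this commit changes.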
app.py CHANGED
@@ -21,6 +21,9 @@ os.makedirs(UPLOAD_FOLDER, exist_ok=True)
 os.environ["TMPDIR"] = bio_model_dir
 os.environ["TEMP"] = bio_model_dir
 os.environ["TMP"] = bio_model_dir
+os.environ['NUMBA_CACHE_DIR'] = '/app/numba_cache'
+os.environ['TRANSFORMERS_CACHE'] = '/app/hf_cache'
+
 
 # 🔗 Dropbox Links for Model Files
 DROPBOX_LINKS = {
@@ -31,33 +34,33 @@ DROPBOX_LINKS = {
     "special_tokens_map.json": "https://www.dropbox.com/scl/fi/t3lvmp5x28d1zjac3j7ec/special_tokens_map.json?rlkey=z2xbompa54iu4y9qgb5bvmfc9&st=zrxlpjdt&dl=1"
 }
 
-# 📥 Function to Download Model Files
-def download_model_files():
-    for filename, url in DROPBOX_LINKS.items():
-        file_path = os.path.join(bio_model_dir, filename)
-        if not os.path.exists(file_path):  # Avoid re-downloading
-            print(f"Downloading {filename}...")
-            response = requests.get(url, stream=True)
-            if response.status_code == 200:
-                with open(file_path, "wb") as f:
-                    for chunk in response.iter_content(chunk_size=1024):
-                        f.write(chunk)
-                print(f"Downloaded: {filename}")
-            else:
-                print(f"Failed to download {filename}")
+# # 📥 Function to Download Model Files
 # def download_model_files():
 #     for filename, url in DROPBOX_LINKS.items():
 #         file_path = os.path.join(bio_model_dir, filename)
+#         if not os.path.exists(file_path):  # Avoid re-downloading
+#             print(f"Downloading {filename}...")
+#             response = requests.get(url, stream=True)
+#             if response.status_code == 200:
+#                 with open(file_path, "wb") as f:
+#                     for chunk in response.iter_content(chunk_size=1024):
+#                         f.write(chunk)
+#                 print(f"Downloaded: {filename}")
+#             else:
+#                 print(f"Failed to download {filename}")
+def download_model_files():
+    for filename, url in DROPBOX_LINKS.items():
+        file_path = os.path.join(bio_model_dir, filename)
 
-#         print(f"Downloading {filename} (forcing overwrite)...")
-#         response = requests.get(url, stream=True)
-#         if response.status_code == 200:
-#             with open(file_path, "wb") as f:
-#                 for chunk in response.iter_content(chunk_size=1024):
-#                     f.write(chunk)
-#             print(f"Downloaded: {filename}")
-#         else:
-#             print(f"Failed to download {filename}")
+        print(f"Downloading {filename} (forcing overwrite)...")
+        response = requests.get(url, stream=True)
+        if response.status_code == 200:
+            with open(file_path, "wb") as f:
+                for chunk in response.iter_content(chunk_size=1024):
+                    f.write(chunk)
+            print(f"Downloaded: {filename}")
+        else:
+            print(f"Failed to download {filename}")
 
 # 📥 Download models before starting
 download_model_files()
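
One side effect of the rewritten download_model_files() is that it streams straight into the final path, so a download that fails partway through leaves a truncated file in bio_model_dir that the loading code downstream may then try to open. A small defensive variant using the same requests.get(..., stream=True) / iter_content loop as the commit; the .part temporary file, the timeout, and the os.replace rename are illustrative additions, not part of this change:

    import os
    import requests

    def download_model_files_atomic(links, dest_dir):
        """Sketch: stream each file to a temp path, then rename it into place."""
        for filename, url in links.items():
            final_path = os.path.join(dest_dir, filename)
            tmp_path = final_path + ".part"

            print(f"Downloading {filename} (forcing overwrite)...")
            response = requests.get(url, stream=True, timeout=60)
            if response.status_code != 200:
                print(f"Failed to download {filename}")
                continue

            with open(tmp_path, "wb") as f:
                for chunk in response.iter_content(chunk_size=1024):
                    f.write(chunk)

            os.replace(tmp_path, final_path)  # atomic rename on the same filesystem
            print(f"Downloaded: {filename}")

Called the same way as in the commit, e.g. download_model_files_atomic(DROPBOX_LINKS, bio_model_dir).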