Uploading VAE in neuro-symbolic-ai/eb-langvae-stella_en_1.5B_v5-Mistral-7B-v0.3-l128
- README.md +13 -0
- decoder.pt +3 -0
- decoder_cfg.json +1 -0
- encoder.pt +3 -0
- encoder_cfg.json +1 -0
- environment.json +1 -0
- model_config.json +1 -0
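
For reference, every file listed above can be fetched in one call with `huggingface_hub` (a minimal sketch, not part of this commit; it assumes `huggingface_hub` is installed and uses the repository id from the commit title):

```python
# Download the full repository snapshot: README.md, the encoder/decoder weights
# (Git LFS objects) and the JSON config files listed above.
from huggingface_hub import snapshot_download

local_path = snapshot_download(
    repo_id="neuro-symbolic-ai/eb-langvae-stella_en_1.5B_v5-Mistral-7B-v0.3-l128"
)
print(local_path)
```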
README.md
ADDED
@@ -0,0 +1,13 @@
+---
+language: en
+tags:
+- pythae
+license: apache-2.0
+---
+
+### Downloading this model from the Hub
+This model was trained with pythae. It can be downloaded or reloaded using the method `load_from_hf_hub`
+```python
+>>> from pythae.models import AutoModel
+>>> model = AutoModel.load_from_hf_hub(hf_hub_path="your_hf_username/repo_name")
+```
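
For this particular repository, the same call would presumably use the id from the commit title. A hedged sketch (not part of the README): whether `AutoModel` can rebuild the custom stella/Mistral encoder and decoder stored here as `encoder.pt`/`decoder.pt` depends on the loading environment providing the matching classes.

```python
# Sketch only: the README's documented call with this repository's id substituted in.
# Assumes the environment can reconstruct the custom encoder/decoder modules.
from pythae.models import AutoModel

model = AutoModel.load_from_hf_hub(
    hf_hub_path="neuro-symbolic-ai/eb-langvae-stella_en_1.5B_v5-Mistral-7B-v0.3-l128"
)
```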
decoder.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7aea9cd6de8fa4310432471fd2a2163f4ad365738c21a44a1666157b859a743b
+size 557993814
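
`decoder.pt` (like `encoder.pt` below) is stored as a Git LFS pointer: the repository itself records only the sha256 oid and the byte size, while the weights live in LFS storage. After downloading, the local file can be checked against the pointer; a minimal sketch with the standard library (the file path is an assumption, the oid and size are from this commit):

```python
# Verify a downloaded LFS object against the pointer above.
import hashlib
import os

EXPECTED_OID = "7aea9cd6de8fa4310432471fd2a2163f4ad365738c21a44a1666157b859a743b"
EXPECTED_SIZE = 557993814  # bytes, from the pointer

path = "decoder.pt"  # assumed local path
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch with LFS pointer"

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
assert sha.hexdigest() == EXPECTED_OID, "sha256 mismatch with LFS pointer"
```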
decoder_cfg.json
ADDED
@@ -0,0 +1 @@
+{"model_path": "mistralai/Mistral-7B-v0.3", "latent_size": 128, "max_len": 32, "conditional": false, "device_map": null}
encoder.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a2d02f4ac9d13b86744107d68e95be0c867524ca58d4714ff304097ce46fb0a0
+size 1574172
encoder_cfg.json
ADDED
@@ -0,0 +1 @@
+{"model_path": "NovaSearch/stella_en_1.5B_v5", "latent_size": 128, "automodel_preset": {"cls": "AutoModel", "pooling_method": "mean", "normalize": true}, "caching": true}
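
Taken together, `encoder_cfg.json` and `decoder_cfg.json` describe an encoder built on NovaSearch/stella_en_1.5B_v5 (mean pooling, normalized embeddings, caching enabled) and a decoder built on mistralai/Mistral-7B-v0.3 with a maximum length of 32, both tied to a 128-dimensional latent space. A quick consistency check over the two files (a sketch; filenames as uploaded in this commit):

```python
# Load both config files from this commit and confirm encoder and decoder
# agree on the latent size.
import json

with open("encoder_cfg.json") as f:
    enc_cfg = json.load(f)
with open("decoder_cfg.json") as f:
    dec_cfg = json.load(f)

assert enc_cfg["latent_size"] == dec_cfg["latent_size"] == 128
print(enc_cfg["model_path"], "->", dec_cfg["model_path"])
```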
environment.json
ADDED
@@ -0,0 +1 @@
+{"name": "EnvironmentConfig", "python_version": "3.11"}
model_config.json
ADDED
@@ -0,0 +1 @@
+{"name": "VAEConfig", "input_dim": null, "latent_dim": 128, "uses_default_encoder": false, "uses_default_decoder": false, "reconstruction_loss": "mse"}
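
`model_config.json` is the pythae-style `VAEConfig`: a 128-dimensional latent space, custom (non-default) encoder and decoder, and an MSE reconstruction loss. A hedged sketch of rebuilding the config object from this file, assuming `VAEConfig` accepts the remaining JSON keys as keyword arguments and that `"name"` is only a serialization tag:

```python
# Reconstruct the VAE configuration from model_config.json (assumption: field
# names match VAEConfig keyword arguments; "name" is dropped as a type tag).
import json

from pythae.models import VAEConfig

with open("model_config.json") as f:
    raw = json.load(f)
raw.pop("name", None)

cfg = VAEConfig(**raw)
print(cfg.latent_dim, cfg.reconstruction_loss)  # expect: 128 mse
```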