OCI DS 6.7B
- app.py +1 -1
- requirements.txt +2 -1
app.py
CHANGED
@@ -8,7 +8,7 @@ MAX_MAX_NEW_TOKENS = 1024
 MAX_INPUT_TOKEN_LENGTH = 2048
 
 # base_model_name = "m-a-p/OpenCodeInterpreter-DS-6.7B"
-base_model_name = "m-a-p/OpenCodeInterpreter-DS-
+base_model_name = "m-a-p/OpenCodeInterpreter-DS-6.7B"
 model = AutoModelForCausalLM.from_pretrained(base_model_name, torch_dtype=torch.bfloat16, device_map="cpu")
 
 tokenizer = AutoTokenizer.from_pretrained(base_model_name)
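Note (not part of the commit): the diff only exposes the model-loading lines; the rest of app.py is not shown here. As a rough, illustrative sketch of how these objects are typically wired together with the transformers API, assuming a simple greedy-decoding helper (the generate() function below and its parameters are hypothetical, not taken from the Space):

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Limits as defined at the top of app.py in the diff above.
MAX_MAX_NEW_TOKENS = 1024
MAX_INPUT_TOKEN_LENGTH = 2048

# Model loading exactly as in the committed change.
base_model_name = "m-a-p/OpenCodeInterpreter-DS-6.7B"
model = AutoModelForCausalLM.from_pretrained(
    base_model_name, torch_dtype=torch.bfloat16, device_map="cpu"
)
tokenizer = AutoTokenizer.from_pretrained(base_model_name)

def generate(prompt: str, max_new_tokens: int = 256) -> str:
    # Truncate the prompt to the Space's input-token budget.
    inputs = tokenizer(
        prompt,
        return_tensors="pt",
        truncation=True,
        max_length=MAX_INPUT_TOKEN_LENGTH,
    )
    # Cap generation length at the Space's maximum.
    output_ids = model.generate(
        **inputs,
        max_new_tokens=min(max_new_tokens, MAX_MAX_NEW_TOKENS),
        do_sample=False,
    )
    # Drop the prompt tokens and decode only the newly generated text.
    new_tokens = output_ids[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)

With device_map="cpu" and bfloat16 weights, the 6.7B model needs roughly 13-14 GB of RAM for the parameters alone and generation will be slow, which is consistent with the Space's capped token limits.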
requirements.txt
CHANGED
@@ -6,4 +6,5 @@ scipy==1.11.2
 sentencepiece==0.1.99
 spaces==0.16.1
 torch==2.0.0
-transformers==4.36.2
+transformers==4.36.2
+#