2 changes: 1 addition & 1 deletion .ci/scripts/gather_test_models.py
@@ -33,7 +33,7 @@
 "dl3": "linux.4xlarge.memory",
 "emformer_join": "linux.4xlarge.memory",
 "emformer_predict": "linux.4xlarge.memory",
-"phi-4-mini": "linux.4xlarge.memory",
+"phi_4_mini": "linux.4xlarge.memory",
 }
 }

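Context on the hunk above: gather_test_models.py pins a few memory-hungry models to a larger CI runner, keyed by model name, so the key here must match the renamed phi_4_mini used in the workflows below. A minimal Python sketch of how such a mapping is typically consumed (the dict, function, and default-runner names are assumptions, not the script's actual API):

    # Illustrative sketch only; dict, helper, and default runner names are assumed.
    CUSTOM_RUNNERS = {
        "phi_4_mini": "linux.4xlarge.memory",
    }

    def runner_for(model_name: str, default: str = "linux.2xlarge") -> str:
        # Models with a custom entry get the larger-memory runner; others use the default.
        return CUSTOM_RUNNERS.get(model_name, default)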
4 changes: 2 additions & 2 deletions .ci/scripts/test_model.sh
@@ -100,11 +100,11 @@ test_model() {
 rm "./${MODEL_NAME}.pte"
 return # Skip running with portable executor runnner since portable doesn't support Qwen's biased linears.
 fi
-if [[ "${MODEL_NAME}" == "phi-4-mini" ]]; then
+if [[ "${MODEL_NAME}" == "phi_4_mini" ]]; then
 # Install requirements for export_llama
 bash examples/models/llama/install_requirements.sh
 # Test export_llama script: python3 -m examples.models.llama.export_llama.
-"${PYTHON_EXECUTABLE}" -m examples.models.llama.export_llama --model "${MODEL_NAME}" -c examples/models/llama/params/demo_rand_params.pth -p examples/models/phi-4-mini/config.json
+"${PYTHON_EXECUTABLE}" -m examples.models.llama.export_llama --model "${MODEL_NAME}" -c examples/models/llama/params/demo_rand_params.pth -p examples/models/phi_4_mini/config.json
 run_portable_executor_runner
 rm "./${MODEL_NAME}.pte"
 return
2 changes: 1 addition & 1 deletion .github/workflows/pull.yml
@@ -106,7 +106,7 @@ jobs:
 - model: emformer_join
   backend: xnnpack-quantization-delegation
   runner: linux.4xlarge.memory
-- model: phi-4-mini
+- model: phi_4_mini
   backend: portable
   runner: linux.4xlarge.memory
 - model: llama3_2_vision_encoder
2 changes: 1 addition & 1 deletion .github/workflows/trunk.yml
@@ -72,7 +72,7 @@ jobs:
   backend: portable
 - model: softmax
   backend: portable
-- model: phi-4-mini
+- model: phi_4_mini
   backend: portable
 - model: qwen2_5
   backend: portable
4 changes: 2 additions & 2 deletions examples/models/__init__.py
@@ -36,7 +36,7 @@ class Model(str, Enum):
 Llava = "llava"
 EfficientSam = "efficient_sam"
 Qwen25 = "qwen2_5"
-Phi4Mini = "phi-4-mini"
+Phi4Mini = "phi_4_mini"

 def __str__(self) -> str:
 return self.value
@@ -80,7 +80,7 @@ def __str__(self) -> str:
 str(Model.Llava): ("llava", "LlavaModel"),
 str(Model.EfficientSam): ("efficient_sam", "EfficientSAM"),
 str(Model.Qwen25): ("qwen2_5", "Qwen2_5Model"),
-str(Model.Phi4Mini): ("phi-4-mini", "Phi4MiniModel"),
+str(Model.Phi4Mini): ("phi_4_mini", "Phi4MiniModel"),
 }

 __all__ = [
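The enum value and the registry key both carry the model's directory name under examples/models/, which is why the rename touches both hunks of this file. A minimal sketch of the lookup, assuming a registry dict shaped like the entries shown above (the dict and helper names are assumptions):

    # Sketch only: dict and helper names are assumed; the entry mirrors the diff above.
    MODEL_NAME_TO_MODEL = {
        "phi_4_mini": ("phi_4_mini", "Phi4MiniModel"),
    }

    def resolve_model(name: str) -> tuple[str, str]:
        # Returns (module directory under examples/models/, model class name).
        return MODEL_NAME_TO_MODEL[name]

    assert resolve_model("phi_4_mini") == ("phi_4_mini", "Phi4MiniModel")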
2 changes: 1 addition & 1 deletion examples/models/llama/export_llama_lib.py
@@ -95,7 +95,7 @@
 "llama3_2",
 "static_llama",
 "qwen2_5",
-"phi-4-mini",
+"phi_4_mini",
 "smollm2",
 ]
 TORCHTUNE_DEFINED_MODELS = ["llama3_2_vision"]
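This list holds the example model names that export_llama accepts via --model, which is why the CI command in test_model.sh above passes --model phi_4_mini together with examples/models/phi_4_mini/config.json. A hedged sketch of the validation such a list supports (the first list's variable name and the helper are assumptions; only TORCHTUNE_DEFINED_MODELS and the entries appear in the diff):

    # Sketch only: EXAMPLE_MODELS and validate_model_name are illustrative names;
    # only TORCHTUNE_DEFINED_MODELS and the list entries appear in the diff.
    EXAMPLE_MODELS = ["llama3_2", "static_llama", "qwen2_5", "phi_4_mini", "smollm2"]
    TORCHTUNE_DEFINED_MODELS = ["llama3_2_vision"]

    def validate_model_name(name: str) -> str:
        allowed = EXAMPLE_MODELS + TORCHTUNE_DEFINED_MODELS
        if name not in allowed:
            raise ValueError(f"unknown model {name!r}; expected one of {allowed}")
        return name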