# Source: 004_comission/uukssw/quote1/_ref/neural-compressor/.azure-pipelines/model-test.yml
# Web-viewer metadata (preserved from scrape): louiscklaw 2627562070 update,
# 2025-01-31 22:10:02 +08:00 — 174 lines, 5.4 KiB, YAML
---
# CI triggers: no branch-push trigger; run only on PRs targeting master that
# touch the core library, packaging files, or the model-test pipeline itself.
trigger: none

pr:
  autoCancel: true
  drafts: false
  branches:
    include:
      - master
  paths:
    include:
      - neural_compressor
      - setup.py
      - requirements.txt
      - .azure-pipelines/model-test.yml
      - .azure-pipelines/template/docker-template.yml
      - .azure-pipelines/scripts/models
      - examples/tensorflow/oob_models/quantization/ptq
      - .azure-pipelines/scripts/fwk_version.sh
      - .azure-pipelines/scripts/install_nc.sh
    exclude:
      # 3.x-API subtrees and tests are covered by other pipelines.
      - test
      - neural_compressor/common
      - neural_compressor/torch
      - neural_compressor/tensorflow
      - neural_compressor/onnxrt
# Default agent pool for the pipeline (individual stages override it below).
pool: MODEL_PERF_TEST_TF

variables:
  # Where per-model *_summary.log / *_tuning_info.log artifacts are collected.
  OUT_SCRIPT_PATH: $(Build.SourcesDirectory)/.azure-pipelines/scripts/models
  # Script path inside the test container (see docker-template.yml).
  SCRIPT_PATH: /neural-compressor/.azure-pipelines/scripts
# Manual-run knobs: per-framework on/off switches plus the model lists that
# the ${{ each }} loops in the stages below expand at compile time.
parameters:
  - name: TensorFlow_Model
    displayName: Run TensorFlow models?
    type: boolean
    default: true
  - name: PyTorch_Model
    displayName: Run PyTorch models?
    type: boolean
    default: true
  - name: ONNX_Model
    displayName: Run ONNX models?
    type: boolean
    default: true
  - name: TensorFlowModelList
    type: object
    default:
      - resnet50v1.5
      - ssd_resnet50_v1
  - name: PyTorchModelList
    type: object
    default:
      - resnet18_fx
  - name: ONNXModelList
    type: object
    default:
      - resnet50-v1-12
stages:
  # The three model stages are independent (dependsOn: []) and each fans out
  # one job per model name via a compile-time ${{ each }} expansion; all jobs
  # delegate to the shared template/model-template.yml.
  - stage: TensorFlowModels
    displayName: Run TensorFlow Model
    pool: MODEL_PERF_TEST
    dependsOn: []
    condition: and(succeeded(), eq('${{ parameters.TensorFlow_Model }}', 'true'))
    jobs:
      - ${{ each model in parameters.TensorFlowModelList }}:
          - job:
            displayName: ${{ model }}
            steps:
              - template: template/model-template.yml
                parameters:
                  modelName: ${{ model }}
                  framework: "tensorflow"

  - stage: PyTorchModels
    displayName: Run PyTorch Model
    pool: MODEL_PERF_TEST
    dependsOn: []
    condition: and(succeeded(), eq('${{ parameters.PyTorch_Model }}', 'true'))
    jobs:
      - ${{ each model in parameters.PyTorchModelList }}:
          - job:
            displayName: ${{ model }}
            steps:
              - template: template/model-template.yml
                parameters:
                  modelName: ${{ model }}
                  framework: "pytorch"

  - stage: ONNXModels
    displayName: Run ONNX Model
    pool: MODEL_PERF_TEST
    dependsOn: []
    condition: and(succeeded(), eq('${{ parameters.ONNX_Model }}', 'true'))
    jobs:
      - ${{ each model in parameters.ONNXModelList }}:
          - job:
            displayName: ${{ model }}
            steps:
              - template: template/model-template.yml
                parameters:
                  modelName: ${{ model }}
                  framework: "onnxrt"
- stage: GenerateLogs
displayName: Generate Report
pool:
vmImage: "ubuntu-latest"
dependsOn: [TensorFlowModels, PyTorchModels, ONNXModels]
jobs:
- job: GenerateReport
steps:
- script: |
echo ${BUILD_SOURCESDIRECTORY}
rm -fr ${BUILD_SOURCESDIRECTORY} || sudo rm -fr ${BUILD_SOURCESDIRECTORY} || true
echo y | docker system prune --all
displayName: "Clean workspace"
- checkout: self
clean: true
displayName: "Checkout out Repo"
- task: DownloadPipelineArtifact@2
inputs:
artifact:
patterns: "**/*_summary.log"
path: $(OUT_SCRIPT_PATH)
- task: DownloadPipelineArtifact@2
inputs:
artifact:
patterns: "**/*_tuning_info.log"
path: $(OUT_SCRIPT_PATH)
- task: UsePythonVersion@0
displayName: "Use Python 3.10"
inputs:
versionSpec: "3.10"
- script: |
cd ${OUT_SCRIPT_PATH}
mkdir generated
mkdir last_generated
pip install requests
python -u collect_log_all.py --logs_dir $(OUT_SCRIPT_PATH) --output_dir generated --build_id=$(Build.BuildId)
displayName: "Collect all logs"
- task: DownloadPipelineArtifact@2
continueOnError: true
inputs:
source: "specific"
artifact: "FinalReport"
patterns: "**.log"
path: $(OUT_SCRIPT_PATH)/last_generated
project: $(System.TeamProject)
pipeline: "Model-Test"
runVersion: "specific"
runId: $(refer_buildId)
displayName: "Download last logs"
- script: |
echo "------ Generating final report.html ------"
cd ${OUT_SCRIPT_PATH}
/usr/bin/bash generate_report.sh --WORKSPACE generated --output_dir generated --last_logt_dir last_generated
displayName: "Generate report"
- task: PublishPipelineArtifact@1
inputs:
targetPath: $(OUT_SCRIPT_PATH)/generated
artifact: FinalReport
publishLocation: "pipeline"
displayName: "Publish report"
- script: |
if [ $(is_perf_reg) == 'true' ]; then
echo "Some benchmark regression occurred or the reference data need to be updated, please check artifacts and reports."
exit 1
fi
displayName: "Specify regression"