@@ -0,0 +1,42 @@
parameters:
  - name: codeScanFileName
    type: string
  - name: uploadPath
    type: string

  - name: codeScanContainerName
    type: string
    default: "codeScan"
  - name: scanModule
    type: string
    default: "neural_compressor"

steps:
  - template: docker-template.yml
    parameters:
      dockerConfigName: "commonDockerConfig"
      repoName: "code-scan"
      repoTag: "1.0"
      dockerFileName: "DockerfileCodeScan"
      containerName: ${{ parameters.codeScanContainerName }}

  - script: |
      docker exec ${{ parameters.codeScanContainerName }} bash -c "bash /neural-compressor/.azure-pipelines/scripts/codeScan/${{ parameters.codeScanFileName }}/${{ parameters.codeScanFileName }}.sh \
      --scan_module=${{ parameters.scanModule }}"
    displayName: "${{ parameters.codeScanFileName }} Check"

  - task: PublishPipelineArtifact@1
    condition: succeededOrFailed()
    inputs:
      targetPath: .azure-pipelines/scripts/codeScan/scanLog/${{ parameters.uploadPath }}
      artifact: $(System.JobAttempt)_${{ parameters.codeScanFileName }}
      publishLocation: "pipeline"
    displayName: "PublishPipelineArtifact"

  - task: Bash@3
    condition: always()
    inputs:
      targetType: "inline"
      script: |
        docker exec ${{ parameters.codeScanContainerName }} bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true"
    displayName: "Docker clean up"
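For context, a minimal sketch of how a pipeline job might consume the code-scan steps template above. The template path and parameter values here are illustrative assumptions, not part of this commit: codeScanFileName must match a directory and script under .azure-pipelines/scripts/codeScan/, and uploadPath a log path under scanLog/.

jobs:
  - job: CodeScan
    steps:
      - template: code-scan-template.yml   # assumed file name for the steps template above
        parameters:
          codeScanFileName: "pylint"       # assumed: resolves to scripts/codeScan/pylint/pylint.sh
          uploadPath: "pylint.json"        # assumed log name under scripts/codeScan/scanLog/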
@@ -0,0 +1,103 @@
parameters:
  - name: dockerConfigName
    type: string
    default: "commonDockerConfig"
  - name: repoName
    type: string
    default: "neural-compressor"
  - name: repoTag
    type: string
    default: "py310"
  - name: dockerFileName
    type: string
    default: "Dockerfile"
  - name: containerName
    type: string
  - name: repo
    type: string
    default: "https://github.com/intel/neural-compressor"
  - name: imageSource
    type: string
    default: "build"

steps:
  - task: Bash@3
    inputs:
      targetType: "inline"
      script: |
        docker ps -a
        if [[ $(docker ps -a | grep -i '${{ parameters.containerName }}'$) ]]; then
          docker start $(docker ps -aq --filter "name=${{ parameters.containerName }}")
          echo "remove left files through container ..."
          docker exec ${{ parameters.containerName }} bash -c "ls -a /neural-compressor && rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* && ls -a /neural-compressor || true"
        fi
    displayName: "Docker workspace clean up"

  - ${{ if eq(parameters.dockerConfigName, 'commonDockerConfig') }}:
      - script: |
          rm -fr ${BUILD_SOURCESDIRECTORY} || sudo rm -fr ${BUILD_SOURCESDIRECTORY} || true
        displayName: "Clean workspace"

      - checkout: self
        clean: true
        displayName: "Checkout Repo"
        fetchDepth: 0

  - ${{ if eq(parameters.dockerConfigName, 'gitCloneDockerConfig') }}:
      - script: |
          rm -fr ${BUILD_SOURCESDIRECTORY} || sudo rm -fr ${BUILD_SOURCESDIRECTORY} || true
          mkdir ${BUILD_SOURCESDIRECTORY}
          chmod 777 ${BUILD_SOURCESDIRECTORY}
        displayName: "Clean workspace"

      - checkout: none

      - script: |
          git clone ${{ parameters.repo }} ${BUILD_SOURCESDIRECTORY}
          git config --global --add safe.directory ${BUILD_SOURCESDIRECTORY}
          cd ${BUILD_SOURCESDIRECTORY}
          git checkout master
        displayName: "Checkout master"

  - ${{ if eq(parameters.imageSource, 'build') }}:
      - script: |
          docker image prune -a -f
          if [[ ! $(docker images | grep -i ${{ parameters.repoName }}:${{ parameters.repoTag }}) ]]; then
            docker build -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/${{ parameters.dockerFileName }}.devel -t ${{ parameters.repoName }}:${{ parameters.repoTag }} .
          fi
          docker images | grep -i ${{ parameters.repoName }}
          if [[ $? -ne 0 ]]; then
            echo "NO Such Repo"
            exit 1
          fi
        displayName: "Build develop docker image"

  - ${{ if eq(parameters.imageSource, 'pull') }}:
      - script: |
          docker pull vault.habana.ai/gaudi-docker/1.19.0/ubuntu22.04/habanalabs/pytorch-installer-2.5.1:latest
        displayName: "Pull habana docker image"

  - script: |
      docker stop $(docker ps -aq --filter "name=${{ parameters.containerName }}")
      docker rm -vf ${{ parameters.containerName }} || true
      env | sort
    displayName: "Clean docker container"

  - ${{ if ne(parameters.containerName, '') }}:
      - task: Bash@3
        inputs:
          targetType: "inline"
          script: |
            if [[ "${{ parameters.imageSource }}" == "build" ]]; then
              docker run -dit --disable-content-trust --privileged --name=${{ parameters.containerName }} --shm-size="2g" \
                -v ${BUILD_SOURCESDIRECTORY}:/neural-compressor -v /tf_dataset:/tf_dataset -v /tf_dataset2:/tf_dataset2 \
                ${{ parameters.repoName }}:${{ parameters.repoTag }}
            else
              docker run -dit --disable-content-trust --privileged --name=${{ parameters.containerName }} --shm-size="2g" \
                --runtime=habana -e HABANA_VISIBLE_DEVICES=all -e OMPI_MCA_btl_vader_single_copy_mechanism=none --cap-add=sys_nice --net=host --ipc=host \
                -v ${BUILD_SOURCESDIRECTORY}:/neural-compressor vault.habana.ai/gaudi-docker/1.19.0/ubuntu22.04/habanalabs/pytorch-installer-2.5.1:latest
              docker exec ${{ parameters.containerName }} bash -c "ln -sf \$(which python3) /usr/bin/python"
            fi
            echo "Show the container list after docker run ... "
            docker ps -a
        displayName: "Docker run - ${{ parameters.containerName }} Container"
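The other templates in this commit call docker-template.yml with the default commonDockerConfig/build combination (or pass the caller's values through). For completeness, a hedged sketch of the non-default branches: gitCloneDockerConfig clones the repo and checks out master instead of using the triggering checkout, and imageSource "pull" uses the Gaudi image instead of building one. The container name below is an assumption.

steps:
  - template: docker-template.yml
    parameters:
      dockerConfigName: "gitCloneDockerConfig"   # clean workspace, then git clone + checkout master
      repoName: "neural-compressor"
      repoTag: "py310"
      dockerFileName: "Dockerfile"
      containerName: "gaudiTest"                 # assumed name; must be non-empty to reach the docker run step
      repo: "https://github.com/intel/neural-compressor"
      imageSource: "pull"                        # pulls the vault.habana.ai Gaudi image instead of building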
@@ -0,0 +1,80 @@
parameters:
  - name: modelName
    type: string
    default: "resnet50v1.5"
  - name: framework
    type: string
    default: "tensorflow"
  - name: APIVersion
    type: string
    default: ""

  - name: modelContainerName
    type: string
    default: "model"

steps:
  - template: docker-template.yml
    parameters:
      dockerConfigName: "commonDockerConfig"
      repoName: "neural-compressor"
      repoTag: "py310"
      dockerFileName: "Dockerfile"
      containerName: ${{ parameters.modelContainerName }}

  - script: |
      docker exec ${{ parameters.modelContainerName }} bash -c "cd /neural-compressor/.azure-pipelines/scripts/models \
      && bash run_${{ parameters.framework }}_models_trigger.sh --model=${{ parameters.modelName }} --mode='env_setup'"
    displayName: Env setup

  - task: DownloadPipelineArtifact@2
    continueOnError: true
    inputs:
      source: "specific"
      artifact: ${{ parameters.framework }}_${{ parameters.modelName }}
      patterns: "**_summary.log"
      path: $(Build.SourcesDirectory)/.azure-pipelines/scripts/models/${{ parameters.modelName }}_refer_log
      project: $(System.TeamProject)
      pipeline: "Model-Test"
      runVersion: "specific"
      runId: $(refer_buildId)
      retryDownloadCount: 3
    displayName: "Download refer logs"

  - script: |
      docker exec ${{ parameters.modelContainerName }} bash -c "cd /neural-compressor/.azure-pipelines/scripts/models \
      && bash run_${{ parameters.framework }}_models_trigger.sh --model=${{ parameters.modelName }} --mode='tuning'"
    displayName: Quantization

  - ${{ if ne(parameters.APIVersion, '3x') }}:
      - script: |
          docker exec ${{ parameters.modelContainerName }} bash -c "cd /neural-compressor/.azure-pipelines/scripts/models \
          && bash run_${{ parameters.framework }}_models_trigger.sh --model=${{ parameters.modelName }} --mode='int8_benchmark' --USE_TUNE_ACC=$(USE_TUNE_ACC) --PERF_STABLE_CHECK=$(PERF_STABLE_CHECK)"
        displayName: INT8 Benchmark

      - script: |
          docker exec ${{ parameters.modelContainerName }} bash -c "cd /neural-compressor/.azure-pipelines/scripts/models \
          && bash run_${{ parameters.framework }}_models_trigger.sh --model=${{ parameters.modelName }} --mode='fp32_benchmark' --USE_TUNE_ACC=$(USE_TUNE_ACC) --PERF_STABLE_CHECK=$(PERF_STABLE_CHECK)"
        displayName: FP32 Benchmark

  - task: Bash@3
    inputs:
      targetType: "inline"
      script: |
        docker exec ${{ parameters.modelContainerName }} bash -c "cd /neural-compressor/.azure-pipelines/scripts/models \
        && bash run_${{ parameters.framework }}_models_trigger.sh --model=${{ parameters.modelName }} --mode='collect_log' --BUILD_BUILDID=$(Build.BuildId)"
    displayName: Collect log

  - task: PublishPipelineArtifact@1
    inputs:
      targetPath: $(Build.SourcesDirectory)/.azure-pipelines/scripts/models/${{ parameters.modelName }}/
      artifact: ${{ parameters.framework }}_${{ parameters.modelName }}
      publishLocation: "pipeline"

  - task: Bash@3
    condition: always()
    inputs:
      targetType: "inline"
      script: |
        docker exec ${{ parameters.modelContainerName }} bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true"
    displayName: "Docker clean up"
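A hedged sketch of a job consuming the model-test steps template above; the job name and template file name are assumptions. Note that the template references the pipeline variables USE_TUNE_ACC, PERF_STABLE_CHECK, and refer_buildId, which the calling pipeline is expected to define.

jobs:
  - job: TensorFlow_ResNet50
    steps:
      - template: model-template.yml   # assumed file name for the model-test template above
        parameters:
          modelName: "resnet50v1.5"    # default shown explicitly for clarity
          framework: "tensorflow"
          APIVersion: ""               # any value other than "3x" enables the gated benchmark steps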
@@ -0,0 +1,61 @@
parameters:
  - name: dockerConfigName
    type: string
    default: "commonDockerConfig"
  - name: repo
    type: string
    default: "https://github.com/intel/neural-compressor"
  - name: utScriptFileName
    type: string
  - name: uploadPath
    type: string
  - name: utArtifact
    type: string
  - name: utTestMode
    type: string
    default: "coverage"
  - name: utContainerName
    type: string
    default: "utTest"
  - name: imageSource
    type: string
    default: "build"

steps:
  - template: docker-template.yml
    parameters:
      dockerConfigName: ${{ parameters.dockerConfigName }}
      repoName: "neural-compressor"
      repoTag: "py310"
      dockerFileName: "Dockerfile"
      containerName: ${{ parameters.utContainerName }}
      repo: ${{ parameters.repo }}
      imageSource: ${{ parameters.imageSource }}

  - script: |
      docker exec ${{ parameters.utContainerName }} bash -c "cd /neural-compressor/.azure-pipelines/scripts \
      && bash install_nc.sh ${{ parameters.utScriptFileName }} \
      && bash ut/${{ parameters.utScriptFileName }}.sh ${{ parameters.utTestMode }}"
    displayName: "Run UT"

  - task: PublishPipelineArtifact@1
    condition: succeededOrFailed()
    inputs:
      targetPath: ${{ parameters.uploadPath }}
      artifact: $(System.JobAttempt)_${{ parameters.utArtifact }}_report
      publishLocation: "pipeline"

  - ${{ if eq(parameters.utTestMode, 'coverage') }}:
      - task: PublishPipelineArtifact@1
        inputs:
          targetPath: ${{ parameters.uploadPath }}
          artifact: ${{ parameters.utArtifact }}_coverage
          publishLocation: "pipeline"

  - task: Bash@3
    condition: always()
    inputs:
      targetType: "inline"
      script: |
        docker exec ${{ parameters.utContainerName }} bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true"
    displayName: "Docker clean up"
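Finally, a hedged sketch of a job consuming the unit-test steps template above; the template file name, script name, and artifact/upload values are assumptions. utScriptFileName must match a script under .azure-pipelines/scripts/ut/, and uploadPath is the path published as the report artifact.

jobs:
  - job: UnitTest
    steps:
      - template: ut-template.yml   # assumed file name for the UT steps template above
        parameters:
          dockerConfigName: "commonDockerConfig"
          utScriptFileName: "run_basic"                   # assumed: runs scripts/ut/run_basic.sh after install_nc.sh
          uploadPath: $(Build.SourcesDirectory)/log_dir   # assumed report directory
          utArtifact: "ut_basic"                          # assumed artifact name prefix
          utTestMode: "coverage"                          # default; also publishes the *_coverage artifact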