Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
137 changes: 75 additions & 62 deletions .azuredevops/ado-ci-pipeline-ms-hosted.yml
Original file line number Diff line number Diff line change
@@ -1,62 +1,75 @@
# Azure DevOps pipeline for CI (Microsoft-hosted version)
# As the Microsoft-hosted agent option has a limit of 10GB of storage for disk outputs from a pipeline,
# this causes an issue when the Docker images for modules under src require more than 10GB of storage.
# If you run into space issues (or other limitations with a Microsoft-hosted agent option outlined in
# https://learn.microsoft.com/en-us/azure/devops/pipelines/agents/hosted?view=azure-devops&tabs=yaml#capabilities-and-limitations),
# consider using the .azuredevops/ado-ci-pipeline-self-hosted.yml version or using scale set agents; see
# this link for more info: https://learn.microsoft.com/en-us/azure/devops/pipelines/agents/scale-set-agents?view=azure-devops
# Note that Docker images will only be built for src directories that contain at least one test file, so the
# total space consumed by Docker builds will depend on which modules under src contain tests.
# For setting up the pipeline in ADO see:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/agents/pools-queues?view=azure-devops&tabs=yaml%2Cbrowser


trigger:
  - main

pool:
  vmImage: 'ubuntu-latest'

steps:
  - task: UsePythonVersion@0
    displayName: "Use Python 3.11"
    inputs:
      # Quoted so YAML reads the version as a string, not a float.
      versionSpec: '3.11'

  - script: |
      python -m venv venv
      source venv/bin/activate
      python -m pip install --upgrade pip
      pip install -r requirements-dev.txt
      pip install pytest-azurepipelines
    displayName: "Install requirements"

  # Files under venv are automatically excluded from ruff check by default:
  # https://docs.astral.sh/ruff/settings/#exclude
  - bash: |
      source venv/bin/activate
      ruff check --output-format azure
    displayName: "Run ruff linter"

  - task: Bash@3
    inputs:
      targetType: 'filePath'
      filePath: ci-tests.sh
    env:
      # Pass the staging directory to ci-tests.sh so test/coverage output
      # lands where the publish tasks below look for it.
      BUILD_ARTIFACTSTAGINGDIRECTORY: $(Build.ArtifactStagingDirectory)
    displayName: "Run pytest in docker containers"

  - task: PublishTestResults@2
    inputs:
      testResultsFiles: '**/test-results-*.xml'
      searchFolder: $(Build.ArtifactStagingDirectory)
    # Publish results even when the test step failed.
    condition: succeededOrFailed()

  # Publish code coverage results
  - task: PublishCodeCoverageResults@1
    inputs:
      codeCoverageTool: 'Cobertura'  # Available options: 'JaCoCo', 'Cobertura'
      summaryFileLocation: '$(Build.ArtifactStagingDirectory)/coverage.xml'
      pathToSources: src/
      #reportDirectory: # Optional
      #additionalCodeCoverageFiles: # Optional
      failIfCoverageEmpty: false  # Optional
# Azure DevOps pipeline for CI (Microsoft-hosted version)
# As the Microsoft-hosted agent option has a limit of 10GB of storage for disk outputs from a pipeline,
# this causes an issue when the Docker images for modules under src require more than 10GB of storage.
# If you run into space issues (or other limitations with a Microsoft-hosted agent option outlined in
# https://learn.microsoft.com/en-us/azure/devops/pipelines/agents/hosted?view=azure-devops&tabs=yaml#capabilities-and-limitations),
# consider using the .azuredevops/ado-ci-pipeline-self-hosted.yml version or using scale set agents; see
# this link for more info: https://learn.microsoft.com/en-us/azure/devops/pipelines/agents/scale-set-agents?view=azure-devops
# Note that Docker images will only be built for src directories that contain at least one test file, so the
# total space consumed by Docker builds will depend on which modules under src contain tests.
# For setting up the pipeline in ADO see:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/agents/pools-queues?view=azure-devops&tabs=yaml%2Cbrowser
#
# Templates are located in .azuredevops/templates/

trigger:
  - main

stages:
  - stage: Lint
    displayName: 'Lint'
    jobs:
      - job: RunLinter
        displayName: 'Run ruff linter'
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - task: UsePythonVersion@0
            displayName: 'Use Python 3.11'
            inputs:
              # Quoted so YAML reads the version as a string, not a float.
              versionSpec: '3.11'

          - script: |
              python -m venv venv
              source venv/bin/activate
              python -m pip install --upgrade pip
              pip install -r requirements-dev.txt
            displayName: 'Install requirements'

          # Files under venv are automatically excluded from ruff check by default:
          # https://docs.astral.sh/ruff/settings/#exclude
          - bash: |
              source venv/bin/activate
              ruff check --output-format azure
            displayName: 'Run ruff linter'

  - stage: Test
    displayName: 'Test Devcontainers'
    dependsOn: []  # empty dependsOn: run in parallel with Lint, not after it
    jobs:
      # Expands to one job per entry in `projects`.
      - template: templates/test-devcontainer-job.yml
        parameters:
          projects:
            - name: 'cpu-project'
              configPath: 'src/sample_cpu_project/.devcontainer/devcontainer.json'
              projectPath: 'src/sample_cpu_project'
            - name: 'gpu-project'
              configPath: 'src/sample_pytorch_gpu_project/.devcontainer/devcontainer.json'
              projectPath: 'src/sample_pytorch_gpu_project'
            - name: 'notebooks'
              configPath: 'notebooks/.devcontainer/devcontainer.json'
              projectPath: 'notebooks'
              smokeTestOnly: true
          pool:
            vmImage: 'ubuntu-latest'

      - job: PublishCoverage
        displayName: 'Publish Coverage'
        # NOTE(review): these names presumably match the jobs the template
        # generates (Test_<name with '-' mapped to '_'>) — confirm against
        # templates/test-devcontainer-job.yml.
        dependsOn: [Test_cpu_project, Test_gpu_project, Test_notebooks]
        condition: succeededOrFailed()  # publish coverage even if tests failed
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - template: templates/merge-coverage.yml
            parameters:
              coverageArtifacts:
                - 'coverage-cpu-project'
                - 'coverage-gpu-project'
137 changes: 59 additions & 78 deletions .azuredevops/ado-ci-pipeline-self-hosted.yml
Original file line number Diff line number Diff line change
@@ -1,82 +1,63 @@
# Azure DevOps pipeline for CI (self-hosted version)
# As the Microsoft-hosted agent option has a limit of 10GB of storage for disk outputs from a pipeline,
# this causes an issue when the Docker images for modules under src require more than 10GB of storage.
# The self-hosted agent option allows the storage to be increased based on the VM size. This version
# includes extra clean-up and space management steps relating to docker builds, but is otherwise equivalent
# to the .azuredevops/ado-ci-pipeline-ms-hosted.yml version.
# For setting up a CI pipeline with a self-hosted Linux agent see:
# https://learn.microsoft.com/en-us/azure/devops/pipelines/agents/v2-linux?view=azure-devops
# Note that the CI script that this pipeline runs (ci-tests.sh) is designed to be run on a Linux agent,
# but could be adapted to other OSs.

# Azure DevOps CI Pipeline (Self-Hosted Agent)
#
# This pipeline runs on self-hosted agents and supports both Linux and macOS (Apple Silicon).
# Templates are located in .azuredevops/templates/

trigger:
  - main

pool:
  name: Default
  demands:
    - agent.name -equals mc-ubuntu-agent

# Start every run from a clean working directory on the persistent agent.
workspace:
  clean: all

steps:
  # Reclaim disk space from previous runs before building new images.
  - script: |
      docker image prune -f
      docker container prune -f
    displayName: "Docker Cleanup"

  - script: |
      df -h
    displayName: "Check agent VM space"

  - task: UsePythonVersion@0
    displayName: "Use Python 3.11"
    inputs:
      # Quoted so YAML reads the version as a string, not a float.
      versionSpec: '3.11'

  - script: |
      python -m venv venv
      source venv/bin/activate
      python -m pip install --upgrade pip
      pip install -r requirements-dev.txt
      pip install pytest-azurepipelines
    displayName: "Install requirements"

  # .NET SDK is required by the coverage/test-result tooling downstream.
  - task: UseDotNet@2
    inputs:
      packageType: 'sdk'
      workingDirectory: "src/"
      version: '6.x'

  # Files under venv are automatically excluded from ruff check by default:
  # https://docs.astral.sh/ruff/settings/#exclude
  - bash: |
      source venv/bin/activate
      ruff check --output-format azure
    displayName: "Run ruff linter"

  - task: Bash@3
    inputs:
      targetType: 'filePath'
      filePath: ci-tests.sh
    displayName: "Run pytest in docker containers"

  - task: PublishTestResults@2
    inputs:
      testResultsFiles: "/tmp/artifact_output/**/test-results-*.xml"
    # Publish results even when the test step failed.
    condition: succeededOrFailed()

  # Publish code coverage results
  - task: PublishCodeCoverageResults@1
    inputs:
      codeCoverageTool: 'Cobertura'  # Available options: 'JaCoCo', 'Cobertura'
      summaryFileLocation: '/tmp/artifact_output/coverage.xml'
      pathToSources: src/
      #reportDirectory: # Optional
      #additionalCodeCoverageFiles: # Optional
      failIfCoverageEmpty: false  # Optional

  # Best-effort cleanup of the agent work directory; `|| true` keeps this
  # step from failing the run, and always() runs it even after failures.
  - bash: |
      sudo rm -rfv /home/azureuser/myagent/_work/* /home/azureuser/myagent/_work/.* || true
    displayName: "Clean-up _work dir"
    condition: always()
stages:
  - stage: Lint
    displayName: 'Lint'
    jobs:
      - job: RunLinter
        displayName: 'Run ruff linter'
        pool:
          name: Default
        # Persistent agent: start from a clean working directory each run.
        workspace:
          clean: all
        steps:
          # Uses the agent's system python3 (no UsePythonVersion on self-hosted).
          - script: |
              python3 -m venv venv
              source venv/bin/activate
              pip install --upgrade pip
              pip install -r requirements-dev.txt
            displayName: 'Install requirements'

          - bash: |
              source venv/bin/activate
              ruff check --output-format azure
            displayName: 'Run ruff linter'

  - stage: Test
    displayName: 'Test Devcontainers'
    dependsOn: []  # empty dependsOn: run in parallel with Lint, not after it
    jobs:
      # Expands to one job per entry in `projects`.
      - template: templates/test-devcontainer-job.yml
        parameters:
          projects:
            - name: 'cpu-project'
              configPath: 'src/sample_cpu_project/.devcontainer/devcontainer.json'
              projectPath: 'src/sample_cpu_project'
            - name: 'gpu-project'
              configPath: 'src/sample_pytorch_gpu_project/.devcontainer/devcontainer.json'
              projectPath: 'src/sample_pytorch_gpu_project'
            - name: 'notebooks'
              configPath: 'notebooks/.devcontainer/devcontainer.json'
              projectPath: 'notebooks'
              smokeTestOnly: true
          pool:
            name: Default

      - job: PublishCoverage
        displayName: 'Publish Coverage'
        # NOTE(review): these names presumably match the jobs the template
        # generates (Test_<name with '-' mapped to '_'>) — confirm against
        # templates/test-devcontainer-job.yml.
        dependsOn: [Test_cpu_project, Test_gpu_project, Test_notebooks]
        condition: succeededOrFailed()  # publish coverage even if tests failed
        pool:
          name: Default
        steps:
          - template: templates/merge-coverage.yml
            parameters:
              coverageArtifacts:
                - 'coverage-cpu-project'
                - 'coverage-gpu-project'
55 changes: 55 additions & 0 deletions .azuredevops/templates/merge-coverage.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
# Template: Merge Coverage Reports
# Downloads per-project coverage artifacts, merges them with ReportGenerator,
# and publishes the combined Cobertura report.
#
# Parameters:
#   coverageArtifacts: list of pipeline artifact names, each containing a coverage.xml

parameters:
  - name: coverageArtifacts
    type: object
    default: []

steps:
  # No repository sources needed; this job only consumes pipeline artifacts.
  - checkout: none

  - ${{ each artifact in parameters.coverageArtifacts }}:
      - task: DownloadPipelineArtifact@2
        inputs:
          artifact: '${{ artifact }}'
          path: '$(Pipeline.Workspace)/coverage/${{ artifact }}'
        # Step-level: tolerate a missing artifact (e.g. a project produced
        # no coverage) instead of failing the whole job.
        continueOnError: true

  - bash: |
      set -ex

      # Install .NET SDK (needed to run ReportGenerator).
      # NOTE(review): $(uname -m) relies on Azure Pipelines leaving unknown
      # macros untouched so bash performs the command substitution — confirm.
      ARCH_FLAG=""
      [[ "$(uname -m)" == "arm64" ]] && ARCH_FLAG="--architecture arm64"
      curl -sSL https://dot.net/v1/dotnet-install.sh | bash /dev/stdin --channel 8.0 --install-dir $HOME/.dotnet $ARCH_FLAG

      # Make .NET available for subsequent tasks
      echo "##vso[task.prependpath]$HOME/.dotnet"
      echo "##vso[task.prependpath]$HOME/.dotnet/tools"
    displayName: 'Install .NET SDK'

  - bash: |
      set -ex

      export DOTNET_ROOT=$HOME/.dotnet
      export PATH="$DOTNET_ROOT:$DOTNET_ROOT/tools:$PATH"

      # Install ReportGenerator (update if already installed).
      dotnet tool install -g dotnet-reportgenerator-globaltool 2>/dev/null || dotnet tool update -g dotnet-reportgenerator-globaltool

      # Find and merge coverage files. $(Pipeline.Workspace) is expanded by
      # Azure Pipelines before the script runs, not by bash.
      COVERAGE_FILES=$(find $(Pipeline.Workspace)/coverage -name "coverage.xml" 2>/dev/null | paste -sd ";" -)
      if [ -z "$COVERAGE_FILES" ]; then
        echo "##vso[task.logissue type=error]No coverage files found"
        exit 1
      fi

      echo "Merging coverage files: $COVERAGE_FILES"
      mkdir -p $(Pipeline.Workspace)/coverage/merged
      reportgenerator \
        "-reports:$COVERAGE_FILES" \
        "-targetdir:$(Pipeline.Workspace)/coverage/merged" \
        "-reporttypes:Cobertura;HtmlInline_AzurePipelines"
    displayName: 'Merge coverage reports'

  - task: PublishCodeCoverageResults@2
    inputs:
      summaryFileLocation: '$(Pipeline.Workspace)/coverage/merged/Cobertura.xml'
25 changes: 25 additions & 0 deletions .azuredevops/templates/publish-test-results.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# Template: Publish Test Results
# Publishes test results and coverage artifacts from a project
#
# Parameters:
#   projectPath: Path to project directory containing test-results.xml and coverage.xml
#   projectName: Name for the test run and coverage artifact

parameters:
  - name: projectPath
    type: string
  - name: projectName
    type: string

steps:
  - task: PublishTestResults@2
    inputs:
      testResultsFiles: '${{ parameters.projectPath }}/test-results.xml'
      testRunTitle: '${{ parameters.projectName }}'
    # Publish results even when the preceding test step failed.
    condition: succeededOrFailed()

  - task: PublishPipelineArtifact@1
    inputs:
      targetPath: '$(System.DefaultWorkingDirectory)/${{ parameters.projectPath }}/coverage.xml'
      artifact: 'coverage-${{ parameters.projectName }}'
    condition: succeededOrFailed()
Loading
Loading