# .azure-pipelines/templates/coverage.yml
# This template adds a job for processing code coverage data.
# It uploads results to Azure Pipelines and codecov.io.
# Use it from a stage that runs after all test jobs have completed,
# for example a separate summary stage that depends on the test stage(s),
# as sketched in the example below.
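#
# Example usage (a sketch; the stage names and condition are illustrative,
# adjust them to the layout of your pipeline):
#
#   stages:
#     - stage: Test
#       jobs: ...  # test jobs that publish "Coverage ..." artifacts
#     - stage: Summary
#       # run even if some test jobs failed, so partial coverage is still reported
#       condition: succeededOrFailed()
#       dependsOn:
#         - Test
#       jobs:
#         - template: templates/coverage.yml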

jobs:
  - job: Coverage
    displayName: Code Coverage
    container: $[ variables.defaultContainer ]
    workspace:
      clean: all
    steps:
      - checkout: self
        fetchDepth: $(fetchDepth)
        path: $(checkoutPath)
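      # Fetch the combined coverage files the test jobs published as "Coverage ..." artifacts.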
      - task: DownloadPipelineArtifact@2
        displayName: Download Coverage Data
        inputs:
          path: coverage/
          patterns: "Coverage */*=coverage.combined"
      - bash: .azure-pipelines/scripts/combine-coverage.py coverage/
        displayName: Combine Coverage Data
      - bash: .azure-pipelines/scripts/report-coverage.sh
        displayName: Generate Coverage Report
        condition: gt(variables.coverageFileCount, 0)
      - task: PublishCodeCoverageResults@1
        inputs:
          codeCoverageTool: Cobertura
          # Azure Pipelines only accepts a single coverage data file.
          # That means only Python or PowerShell coverage can be uploaded, but not both.
          # Set the "pipelinesCoverage" variable to determine which type is uploaded.
          # Use "coverage" for Python and "coverage-powershell" for PowerShell.
          summaryFileLocation: "$(outputPath)/reports/$(pipelinesCoverage).xml"
        displayName: Publish to Azure Pipelines
        condition: gt(variables.coverageFileCount, 0)
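      # Upload results to codecov.io; continueOnError ensures that an upload
      # failure does not fail the pipeline.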
      - bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
        displayName: Publish to codecov.io
        condition: gt(variables.coverageFileCount, 0)
        continueOnError: true