Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 20 additions & 11 deletions .github/workflows/test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,24 +5,33 @@ on:
branches:
- main
- 'release/*'
schedule:
- cron: '0 0 * * *' # Runs at midnight UTC every day

jobs:
test:
strategy:
matrix:
version: [14, 16, 18, 20]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: ./
- uses: actions/checkout@v4
- name: Upload reports using a glob pattern
uses: ./
with:
api-key: ${{secrets.DD_API_KEY}}
datadog-site: datad0g.com
api-key: ${{secrets.DD_API_KEY_CI_VISIBILITY}}
logs: "true"
files: ./ci/fixtures/
service: junit-upload-github-action
files: '**/fixtures/**'
service: junit-upload-github-action-tests
env: ci
tags: "foo:bar,alpha:bravo"
- name: Tag pipeline with ci version
tags: "foo:bar,alpha:bravo,test.node.version:${{ matrix.version}}"
node-version: ${{ matrix.version}}
- name: Check that test data can be queried
run: |
datadog-ci tag --level pipeline --tags "datadogci.version:`npm list -g @datadog/datadog-ci | grep @datadog/datadog-ci | awk -F' ' '{print $2}' | awk -F'@' '{print $3}' | tr -d '\n'`"
npm install @datadog/datadog-api-client
node ./check-junit-upload.js
env:
DATADOG_API_KEY: ${{secrets.DD_API_KEY}}
DATADOG_SITE: datad0g.com
EXTRA_TAGS: "@foo:bar @alpha:bravo @test.node.version:${{ matrix.version}}"
DD_API_KEY: ${{ secrets.DD_API_KEY_CI_VISIBILITY }}
DD_APP_KEY: ${{ secrets.DD_APP_KEY_CI_VISIBILITY }}
DD_SERVICE: junit-upload-github-action-tests
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ The action has the following options:
| `datadog-site` | The Datadog site to upload the files to. | True | `datadoghq.com` |
| `files` | Path to file or folder containing XML files to upload | True | `.` |
| `concurrency` | Controls the maximum number of concurrent file uploads | True | `20` |
| `node-version` | The node version to use to install the datadog-ci. It must be `>=10.24.1` | True | `16` |
| `node-version` | The node version to use to install the datadog-ci. It must be `>=14` | True | `20` |
| `tags` | Optional extra tags to add to the tests | False | |
| `env` | Optional environment to add to the tests | False | |
| `logs` | When set to "true" enables forwarding content from the XML reports as Logs. The content inside `<system-out>`, `<system-err>`, and `<failure>` is collected as logs. Logs from elements inside a `<testcase>` are automatically connected to the test. | False | |
Expand Down
17 changes: 7 additions & 10 deletions action.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Composite action to upload junit test result files to Datadog CI Visibility
name: 'Datadog JUnitXML Upload'
description: 'Upload JUnitXML reports files to Datadog CI Visibility'
name: "Datadog JUnitXML Upload"
description: "Upload JUnitXML reports files to Datadog CI Visibility"
inputs:
api-key:
required: true
Expand All @@ -23,7 +23,7 @@ inputs:
node-version:
required: true
description: The node version used to install datadog-ci
default: "16"
default: "20"
tags:
required: false
description: Datadog tags to associate with the uploaded test results.
Expand All @@ -34,24 +34,21 @@ inputs:
required: false
description: Set to "true" to enable forwarding content from XML reports as logs.
extra-args:
default: ''
default: ""
description: Extra args to be passed to the datadog-ci cli.
required: false
runs:
using: "composite"
steps:
- name: Install node
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: ${{ inputs.node-version }}
- name: Get Datadog CLI
shell: bash
run: npm install -g @datadog/datadog-ci
- name: Upload the JUnit files
if: ${{ inputs.logs == 'true' }}
shell: bash
run: |
datadog-ci junit upload \
npx @datadog/datadog-ci junit upload \
--service ${{ inputs.service }} \
--logs \
--max-concurrency ${{ inputs.concurrency }} \
Expand All @@ -66,7 +63,7 @@ runs:
if: ${{ inputs.logs != 'true' }}
shell: bash
run: |
datadog-ci junit upload \
npx @datadog/datadog-ci junit upload \
--service ${{ inputs.service }} \
--max-concurrency ${{ inputs.concurrency }} \
${{ inputs.extra-args }} \
Expand Down
61 changes: 61 additions & 0 deletions check-junit-upload.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
'use strict';

// Datadog API client for querying CI Visibility test events.
const { client, v2 } = require("@datadog/datadog-api-client");

// Client configuration is read from the standard DD_* environment variables.
const configuration = client.createConfiguration();
const apiInstance = new v2.CIVisibilityTestsApi(configuration);

// Number of test events the fixture upload is expected to produce.
const EXPECTED_NUM_TESTS = 32;

// Base query: events for this service and commit from the last 5 minutes.
const params = {
  filterQuery: `@test.service:${process.env.DD_SERVICE} @git.commit.sha:${process.env.GITHUB_SHA}`,
  filterFrom: new Date(Date.now() - 300 * 1000), // Last 5 minutes
  filterTo: new Date(),
  pageLimit: 50,
};

// Polling cadence while waiting for the uploaded events to become queryable.
const CHECK_INTERVAL_SECONDS = 10;
const MAX_NUM_ATTEMPTS = 10;

/**
 * Query Datadog CI Visibility for test events matching the base `params`
 * filter plus `extraFilter`.
 *
 * @param {string} extraFilter - Extra search terms appended to the base query.
 * @returns {Promise<Array>} The matching events; an empty array on API error,
 *   so callers can always read `.length` safely.
 */
async function getTestData (extraFilter) {
  const finalFilterQuery = `${params.filterQuery} ${extraFilter}`
  console.log(`🔎 Querying CI Visibility tests with ${finalFilterQuery}.`)
  try {
    const response = await apiInstance.listCIAppTestEvents({
      ...params,
      filterQuery: finalFilterQuery,
    })
    return response.data
  } catch (error) {
    // Log and return an empty list. The previous `.catch(console.error)`
    // resolved the promise to `undefined`, which crashed the caller when it
    // accessed `data.length`.
    console.error(error)
    return []
  }
}

/**
 * Resolve after the given delay.
 *
 * @param {number} waitSeconds - Delay before resolution, in seconds.
 * @returns {Promise<void>}
 */
function waitFor (waitSeconds) {
  return new Promise((resolve) => {
    setTimeout(resolve, waitSeconds * 1000)
  })
}

/**
 * Poll CI Visibility until the expected number of uploaded tests is visible.
 *
 * Retries up to MAX_NUM_ATTEMPTS times, sleeping CHECK_INTERVAL_SECONDS
 * between attempts, then terminates the process: exit code 0 when exactly
 * EXPECTED_NUM_TESTS events were found, 1 otherwise.
 */
async function checkJunitUpload () {
  let numAttempts = 0
  let isSuccess = false
  let data = []
  while (numAttempts++ < MAX_NUM_ATTEMPTS && !isSuccess) {
    // `?? []` guards against the query helper resolving to `undefined` on an
    // API error, which would otherwise throw on `data.length` below.
    data = (await getTestData(`test_level:test ${process.env.EXTRA_TAGS}`)) ?? []
    if (data.length === EXPECTED_NUM_TESTS) {
      isSuccess = true
    } else if (numAttempts < MAX_NUM_ATTEMPTS) {
      // Not the last attempt: wait and retry while indexing catches up.
      console.log(`🔁 Attempt number ${numAttempts} failed, retrying in ${CHECK_INTERVAL_SECONDS} seconds.`)
      await waitFor(CHECK_INTERVAL_SECONDS)
    }
  }
  if (isSuccess) {
    console.log(`✅ Successful check: the API returned ${data.length} tests.`)
    process.exit(0)
  } else {
    console.log(`❌ Failed check: the API returned ${data.length} tests but ${EXPECTED_NUM_TESTS} were expected.`)
    process.exit(1)
  }
}

// Entry point: handle unexpected rejections explicitly instead of leaving a
// floating promise, so the job still fails with a non-zero exit code.
checkJunitUpload().catch((err) => {
  console.error(err)
  process.exit(1)
})
Original file line number Diff line number Diff line change
@@ -1,27 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Python / unittest-xml-report -->
<testsuite errors="0" failures="1" file="api/tests_unittest.py" name="api.tests_unittest.TestStringMethods-20200211105522" skipped="1" tests="5" time="0.007" timestamp="2020-02-11T10:55:22">
<testcase classname="api.tests_unittest.TestStringMethods" file="api/tests_unittest.py" line="8" name="test_isupper" time="0.000" timestamp="2020-02-11T10:55:22">
<testsuite errors="0" failures="1" file="api/tests_unittest.py" name="api.tests_unittest.TestStringMethods-20200211105522" skipped="1" tests="5" time="0.007">
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Removed the timestamps from this report; otherwise it wouldn't get processed by our backend.

<testcase classname="api.tests_unittest.TestStringMethods" file="api/tests_unittest.py" line="8" name="test_isupper" time="0.000">
<system-out>
<![CDATA[]]> </system-out>
<system-err>
<![CDATA[]]> </system-err>
</testcase>
<testcase classname="api.tests_unittest.TestStringMethods" file="api/tests_unittest.py" line="12" name="test_split" time="0.000" timestamp="2020-02-11T10:55:22">
<testcase classname="api.tests_unittest.TestStringMethods" file="api/tests_unittest.py" line="12" name="test_split" time="0.000">
<system-out>
<![CDATA[]]> </system-out>
<system-err>
<![CDATA[]]> </system-err>
</testcase>
<testcase classname="api.tests_unittest.TestStringMethods" file="api/tests_unittest.py" line="5" name="test_upper" time="0.000" timestamp="2020-02-11T10:55:22">
<testcase classname="api.tests_unittest.TestStringMethods" file="api/tests_unittest.py" line="5" name="test_upper" time="0.000">
<system-out>
<![CDATA[]]>
Some output
</system-out>
<system-err>
<![CDATA[]]> </system-err>
</testcase>
<testcase classname="api.tests_unittest.TestStringMethods" file="api/tests_unittest.py" line="23" name="test_failure_unittest" time="0.005" timestamp="2020-02-11T10:55:22">
<testcase classname="api.tests_unittest.TestStringMethods" file="api/tests_unittest.py" line="23" name="test_failure_unittest" time="0.005">
<failure message="True != False" type="AssertionError">
<![CDATA[Traceback (most recent call last):
File "/Home/api/tests_unittest.py", line 24, in test_failure_unittest
Expand All @@ -33,7 +33,7 @@ AssertionError: True != False
<system-err>
<![CDATA[]]> </system-err>
</testcase>
<testcase classname="api.tests_unittest.TestStringMethods" file="api/tests_unittest.py" line="19" name="test_skipped_unittest" time="0.000" timestamp="2020-02-11T10:55:22">
<testcase classname="api.tests_unittest.TestStringMethods" file="api/tests_unittest.py" line="19" name="test_skipped_unittest" time="0.000">
<skipped message="demonstrating skipping" type="skip"/>
<system-out>
<![CDATA[]]> </system-out>
Expand Down