POST: projects/{project_id}/test-runs/record
Description
Records the results of executing an automated test.
Use this overload when you need to be able to set Test Run custom properties.
How to Execute
To access this REST web service, you need to use the following URL. Make sure to replace any parameters (e.g. {project_id}) with the relevant value (e.g. 1):
https://api.inflectra.com/Spira/Services/v7_0/RestService.svc/projects/{project_id}/test-runs/record
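For example, a minimal Python sketch of assembling that URL and the common request headers is shown below. The Content-Type/Accept headers select the JSON form of the service; the username and api-key querystring credentials are an assumption based on the general Spira REST authentication documentation, and the values shown are placeholders:

# Sketch: assemble the record URL and common headers for the JSON form of this service.
# The username/api-key querystring credentials are an assumed authentication scheme;
# the values below are placeholders, not real credentials.
base_url = "https://api.inflectra.com/Spira/Services/v7_0/RestService.svc"
project_id = 1  # replace {project_id} with the relevant value

url = f"{base_url}/projects/{project_id}/test-runs/record"

headers = {
    "Content-Type": "application/json",  # send the request body as JSON rather than XML
    "Accept": "application/json",        # ask for the JSON form of the return data
}
credentials = {"username": "fredbloggs", "api-key": "{00000000-0000-0000-0000-000000000000}"}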
Request Parameters
Name | Description
project_id | The id of the current project
Request Body
Property | Description
TestRunFormatId | The format of the automation results (1=Plain Text, 2=HTML) stored in the 'RunnerStackTrace' field - required
RunnerName | The name of the external automated tool that executed the test - required
RunnerTestName | The name of the test case as it is known in the external tool - required
RunnerAssertCount | The number of assertions/errors reported during the automated test execution
RunnerMessage | The summary result of the test case - required
RunnerStackTrace | The detailed trace of test results reported back from the automated testing tool - required
AutomationHostId | The id of the automation host that the result is being recorded for
AutomationEngineId | The id of the automation engine that the result is being recorded for
AutomationEngineToken | The token of the automation engine that the result is being recorded for (read-only)
AutomationAttachmentId | The id of the attachment that is being used to store the test script (file or url)
Parameters | The list of test case parameters that have been provided
ScheduledDate | The datetime the test was scheduled for
TestRunSteps | The list of test steps that comprise the automated test. These are optional for automated test runs; their status does not change the overall status of the automated test run and simply makes reporting clearer inside the system. They will also update the status of the appropriate Test Step(s) if a valid test step id is provided.
TestRunId | The id of the test run
Name | The name of the test run (usually the same as the test case)
TestCaseId | The id of the test case that the test run is an instance of
TestCaseGuid | The guid of the test case that the test run is an instance of
TestRunTypeId | The id of the type of test run (automated vs. manual)
TesterId | The id of the user that executed the test; the authenticated user is used if no value is provided
ExecutionStatusId | The id of the overall execution status for the test run (Failed = 1, Passed = 2, NotRun = 3, NotApplicable = 4, Blocked = 5, Caution = 6)
ReleaseId | The id of the release that the test run should be reported against
TestSetId | The id of the test set that the test run should be reported against
TestSetTestCaseId | The id of the unique test case entry in the test set
StartDate | The date/time that the test execution was started
EndDate | The date/time that the test execution was completed
BuildId | The id of the build that the test was executed against
EstimatedDuration | The estimated duration of how long the test should take to execute (read-only); this field is populated from the test case being executed
ActualDuration | The actual duration of how long the test took to execute (read-only); this field is calculated from the start/end dates provided during execution
TestConfigurationId | The id of the specific test configuration that was used
ProjectId | The id of the project that the artifact belongs to
ProjectGuid | The guid of the project that the artifact belongs to
ConcurrencyDate | The datetime used to track optimistic concurrency to prevent edit conflicts
CustomProperties | The list of associated custom properties/fields for this artifact
Tags | The list of meta-tags that should be associated with the artifact
Guid | The unique identifier for the artifact
XML example:
<RemoteAutomatedTestRun xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.datacontract.org/2004/07/Inflectra.SpiraTest.Web.Services.v7_0.DataObjects">
<ArtifactTypeId>0</ArtifactTypeId>
<ConcurrencyDate>0001-01-01T00:00:00</ConcurrencyDate>
<CustomProperties i:nil="true" />
<Guid i:nil="true" />
<IsAttachments>false</IsAttachments>
<ProjectGuid i:nil="true" />
<ProjectId>0</ProjectId>
<Tags i:nil="true" />
<ActualDuration i:nil="true" />
<BuildId i:nil="true" />
<EndDate i:nil="true" />
<EstimatedDuration i:nil="true" />
<ExecutionStatusId>0</ExecutionStatusId>
<Name i:nil="true" />
<ReleaseGuid i:nil="true" />
<ReleaseId i:nil="true" />
<ReleaseVersionNumber i:nil="true" />
<StartDate>0001-01-01T00:00:00</StartDate>
<TestCaseGuid i:nil="true" />
<TestCaseId>0</TestCaseId>
<TestConfigurationId i:nil="true" />
<TestRunId i:nil="true" />
<TestRunTypeId>0</TestRunTypeId>
<TestSetGuid i:nil="true" />
<TestSetId i:nil="true" />
<TestSetTestCaseId i:nil="true" />
<TesterGuid i:nil="true" />
<TesterId i:nil="true" />
<AutomationAttachmentId i:nil="true" />
<AutomationEngineId i:nil="true" />
<AutomationEngineToken i:nil="true" />
<AutomationHostId i:nil="true" />
<Parameters i:nil="true" />
<RunnerAssertCount i:nil="true" />
<RunnerMessage i:nil="true" />
<RunnerName i:nil="true" />
<RunnerStackTrace i:nil="true" />
<RunnerTestName i:nil="true" />
<ScheduledDate i:nil="true" />
<TestRunFormatId>0</TestRunFormatId>
<TestRunSteps i:nil="true" />
</RemoteAutomatedTestRun>
{"TestRunFormatId":0,
"RunnerName":null,
"RunnerTestName":null,
"RunnerAssertCount":null,
"RunnerMessage":null,
"RunnerStackTrace":null,
"AutomationHostId":null,
"AutomationEngineId":null,
"AutomationEngineToken":null,
"AutomationAttachmentId":null,
"Parameters":null,
"ScheduledDate":null,
"TestRunSteps":null,
"TestRunId":null,
"Name":null,
"TestCaseId":0,
"TestCaseGuid":null,
"TestRunTypeId":0,
"TesterId":null,
"TesterGuid":null,
"ExecutionStatusId":0,
"ReleaseId":null,
"ReleaseGuid":null,
"TestSetId":null,
"TestSetGuid":null,
"TestSetTestCaseId":null,
"StartDate":"0001-01-01T00:00:00",
"EndDate":null,
"BuildId":null,
"EstimatedDuration":null,
"ActualDuration":null,
"TestConfigurationId":null,
"ReleaseVersionNumber":null,
"ProjectId":0,
"ProjectGuid":null,
"ArtifactTypeId":0,
"ConcurrencyDate":"0001-01-01T00:00:00",
"CustomProperties":null,
"IsAttachments":false,
"Tags":null,
"Guid":null}
Return Data
The JSON and XML examples below show the shape of the entry that will be returned; they do not show how that entry will be populated.
Property | Description
TestRunFormatId | The format of the automation results (1=Plain Text, 2=HTML) stored in the 'RunnerStackTrace' field - required
RunnerName | The name of the external automated tool that executed the test - required
RunnerTestName | The name of the test case as it is known in the external tool - required
RunnerAssertCount | The number of assertions/errors reported during the automated test execution
RunnerMessage | The summary result of the test case - required
RunnerStackTrace | The detailed trace of test results reported back from the automated testing tool - required
AutomationHostId | The id of the automation host that the result is being recorded for
AutomationEngineId | The id of the automation engine that the result is being recorded for
AutomationEngineToken | The token of the automation engine that the result is being recorded for (read-only)
AutomationAttachmentId | The id of the attachment that is being used to store the test script (file or url)
Parameters | The list of test case parameters that have been provided
ScheduledDate | The datetime the test was scheduled for
TestRunSteps | The list of test steps that comprise the automated test. These are optional for automated test runs; their status does not change the overall status of the automated test run and simply makes reporting clearer inside the system. They will also update the status of the appropriate Test Step(s) if a valid test step id is provided.
TestRunId | The id of the test run
Name | The name of the test run (usually the same as the test case)
TestCaseId | The id of the test case that the test run is an instance of
TestCaseGuid | The guid of the test case that the test run is an instance of
TestRunTypeId | The id of the type of test run (automated vs. manual)
TesterId | The id of the user that executed the test; the authenticated user is used if no value is provided
TesterGuid | The guid of the tester
ExecutionStatusId | The id of the overall execution status for the test run (Failed = 1, Passed = 2, NotRun = 3, NotApplicable = 4, Blocked = 5, Caution = 6)
ReleaseId | The id of the release that the test run should be reported against
ReleaseGuid | The guid of the release
TestSetId | The id of the test set that the test run should be reported against
TestSetGuid | The guid of the test set
TestSetTestCaseId | The id of the unique test case entry in the test set
StartDate | The date/time that the test execution was started
EndDate | The date/time that the test execution was completed
BuildId | The id of the build that the test was executed against
EstimatedDuration | The estimated duration of how long the test should take to execute (read-only); this field is populated from the test case being executed
ActualDuration | The actual duration of how long the test took to execute (read-only); this field is calculated from the start/end dates provided during execution
TestConfigurationId | The id of the specific test configuration that was used
ReleaseVersionNumber | The version number of the release this test run was run against
ProjectId | The id of the project that the artifact belongs to
ProjectGuid | The guid of the project that the artifact belongs to
ArtifactTypeId | The type of artifact that we have
ConcurrencyDate | The datetime used to track optimistic concurrency to prevent edit conflicts
CustomProperties | The list of associated custom properties/fields for this artifact
IsAttachments | Does this artifact have any attachments?
Tags | The list of meta-tags that should be associated with the artifact
Guid | The unique identifier for the artifact
XML example:
<RemoteAutomatedTestRun xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.datacontract.org/2004/07/Inflectra.SpiraTest.Web.Services.v7_0.DataObjects">
<ArtifactTypeId>0</ArtifactTypeId>
<ConcurrencyDate>0001-01-01T00:00:00</ConcurrencyDate>
<CustomProperties i:nil="true" />
<Guid i:nil="true" />
<IsAttachments>false</IsAttachments>
<ProjectGuid i:nil="true" />
<ProjectId>0</ProjectId>
<Tags i:nil="true" />
<ActualDuration i:nil="true" />
<BuildId i:nil="true" />
<EndDate i:nil="true" />
<EstimatedDuration i:nil="true" />
<ExecutionStatusId>0</ExecutionStatusId>
<Name i:nil="true" />
<ReleaseGuid i:nil="true" />
<ReleaseId i:nil="true" />
<ReleaseVersionNumber i:nil="true" />
<StartDate>0001-01-01T00:00:00</StartDate>
<TestCaseGuid i:nil="true" />
<TestCaseId>0</TestCaseId>
<TestConfigurationId i:nil="true" />
<TestRunId i:nil="true" />
<TestRunTypeId>0</TestRunTypeId>
<TestSetGuid i:nil="true" />
<TestSetId i:nil="true" />
<TestSetTestCaseId i:nil="true" />
<TesterGuid i:nil="true" />
<TesterId i:nil="true" />
<AutomationAttachmentId i:nil="true" />
<AutomationEngineId i:nil="true" />
<AutomationEngineToken i:nil="true" />
<AutomationHostId i:nil="true" />
<Parameters i:nil="true" />
<RunnerAssertCount i:nil="true" />
<RunnerMessage i:nil="true" />
<RunnerName i:nil="true" />
<RunnerStackTrace i:nil="true" />
<RunnerTestName i:nil="true" />
<ScheduledDate i:nil="true" />
<TestRunFormatId>0</TestRunFormatId>
<TestRunSteps i:nil="true" />
</RemoteAutomatedTestRun>
{"TestRunFormatId":0,
"RunnerName":null,
"RunnerTestName":null,
"RunnerAssertCount":null,
"RunnerMessage":null,
"RunnerStackTrace":null,
"AutomationHostId":null,
"AutomationEngineId":null,
"AutomationEngineToken":null,
"AutomationAttachmentId":null,
"Parameters":null,
"ScheduledDate":null,
"TestRunSteps":null,
"TestRunId":null,
"Name":null,
"TestCaseId":0,
"TestCaseGuid":null,
"TestRunTypeId":0,
"TesterId":null,
"TesterGuid":null,
"ExecutionStatusId":0,
"ReleaseId":null,
"ReleaseGuid":null,
"TestSetId":null,
"TestSetGuid":null,
"TestSetTestCaseId":null,
"StartDate":"0001-01-01T00:00:00",
"EndDate":null,
"BuildId":null,
"EstimatedDuration":null,
"ActualDuration":null,
"TestConfigurationId":null,
"ReleaseVersionNumber":null,
"ProjectId":0,
"ProjectGuid":null,
"ArtifactTypeId":0,
"ConcurrencyDate":"0001-01-01T00:00:00",
"CustomProperties":null,
"IsAttachments":false,
"Tags":null,
"Guid":null}