See all operations
GET: projects/{project_id}/test-runs/{test_run_id}/automated
Description
Retrieves a single automated test run in the system including the automation-specific information
How to Execute
To access this REST web service, you need to use the following URL:
https://api.inflectra.com/Spira/Services/v4_0/RestService.svc/projects/{project_id}/test-runs/{test_run_id}/automated
Request Parameters
Name
|
Description
|
project_id
|
The id of the current project
|
test_run_id
|
The id of the test run
|
Request Body
Return Data
The XML and JSON examples below show the shape of one entry that will be returned. They do not show how that entry will be populated.
Property
|
Description
|
TestRunFormatId
|
The format of the automation results (1=Plain Text, 2=HTML) stored in the 'RunnerStackTrace' field
|
RunnerName
|
The name of the external automated tool that executed the test
|
RunnerTestName
|
The name of the test case as it is known in the external tool
|
RunnerAssertCount
|
The number of assertions/errors reported during the automated test execution
|
RunnerMessage
|
The summary result of the test case
|
RunnerStackTrace
|
The detailed trace of test results reported back from the automated testing tool
|
AutomationHostId
|
The id of the automation host that the result is being recorded for
|
AutomationEngineId
|
The id of the automation engine that the result is being recorded for
|
AutomationEngineToken
|
The token of the automation engine that the result is being recorded for (read-only)
|
AutomationAttachmentId
|
The id of the attachment that is being used to store the test script (file or url)
|
Parameters
|
The list of test case parameters that have been provided
|
ScheduledDate
|
The datetime the test was scheduled for
|
TestRunSteps
|
The list of test steps that comprise the automated test
These are optional for automated test runs. The status of the test run steps
does not change the overall status of the automated test run; they are used
simply to make reporting clearer inside the system. They will also update the
status of the appropriate Test Step(s) if a valid test step id is provided.
|
TestRunId
|
The id of the test run
|
Name
|
The name of the test run (usually the same as the test case)
|
TestCaseId
|
The id of the test case that the test run is an instance of
|
TestRunTypeId
|
The id of the type of test run (automated vs. manual)
|
TesterId
|
The id of the user that executed the test
The authenticated user is used if no value is provided
|
ExecutionStatusId
|
The id of the overall execution status for the test run
Failed = 1;
Passed = 2;
NotRun = 3;
NotApplicable = 4;
Blocked = 5;
Caution = 6;
|
ReleaseId
|
The id of the release that the test run should be reported against
|
TestSetId
|
The id of the test set that the test run should be reported against
|
TestSetTestCaseId
|
The id of the unique test case entry in the test set
|
StartDate
|
The date/time that the test execution was started
|
EndDate
|
The date/time that the test execution was completed
|
BuildId
|
The id of the build that the test was executed against
|
EstimatedDuration
|
The estimated duration of how long the test should take to execute (read-only)
This field is populated from the test case being executed
|
ActualDuration
|
The actual duration of how long the test took to execute (read-only)
This field is calculated from the start/end dates provided during execution
|
ProjectId
|
The id of the project that the artifact belongs to
The current project is always used for Insert operations for security reasons
|
ArtifactTypeId
|
The type of artifact that we have (read-only)
|
ConcurrencyDate
|
The datetime used to track optimistic concurrency to prevent edit conflicts
|
CustomProperties
|
The list of associated custom properties/fields for this artifact
|
<RemoteAutomatedTestRun xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.datacontract.org/2004/07/Inflectra.SpiraTest.Web.Services.v4_0.DataObjects">
<ArtifactTypeId>0</ArtifactTypeId>
<ConcurrencyDate>0001-01-01T00:00:00</ConcurrencyDate>
<CustomProperties i:nil="true" />
<ProjectId i:nil="true" />
<ActualDuration i:nil="true" />
<BuildId i:nil="true" />
<EndDate i:nil="true" />
<EstimatedDuration i:nil="true" />
<ExecutionStatusId>0</ExecutionStatusId>
<Name i:nil="true" />
<ReleaseId i:nil="true" />
<StartDate>0001-01-01T00:00:00</StartDate>
<TestCaseId>0</TestCaseId>
<TestRunId i:nil="true" />
<TestRunTypeId>0</TestRunTypeId>
<TestSetId i:nil="true" />
<TestSetTestCaseId i:nil="true" />
<TesterId i:nil="true" />
<AutomationAttachmentId i:nil="true" />
<AutomationEngineId i:nil="true" />
<AutomationEngineToken i:nil="true" />
<AutomationHostId i:nil="true" />
<Parameters i:nil="true" />
<RunnerAssertCount i:nil="true" />
<RunnerMessage i:nil="true" />
<RunnerName i:nil="true" />
<RunnerStackTrace i:nil="true" />
<RunnerTestName i:nil="true" />
<ScheduledDate i:nil="true" />
<TestRunFormatId>0</TestRunFormatId>
<TestRunSteps i:nil="true" />
</RemoteAutomatedTestRun>
{"ArtifactTypeId":0,
"ConcurrencyDate":"\/Date(-62135578800000-0500)\/",
"CustomProperties":null,
"ProjectId":null,
"ActualDuration":null,
"BuildId":null,
"EndDate":null,
"EstimatedDuration":null,
"ExecutionStatusId":0,
"Name":null,
"ReleaseId":null,
"StartDate":"\/Date(-62135578800000-0500)\/",
"TestCaseId":0,
"TestRunId":null,
"TestRunTypeId":0,
"TestSetId":null,
"TestSetTestCaseId":null,
"TesterId":null,
"AutomationAttachmentId":null,
"AutomationEngineId":null,
"AutomationEngineToken":null,
"AutomationHostId":null,
"Parameters":null,
"RunnerAssertCount":null,
"RunnerMessage":null,
"RunnerName":null,
"RunnerStackTrace":null,
"RunnerTestName":null,
"ScheduledDate":null,
"TestRunFormatId":0,
"TestRunSteps":null}