See all operations
POST: projects/{project_id}/test-runs/record-multiple
Description
Records the results of executing multiple automated tests.
You need to use this overload when you want to be able to execute a large batch of test runs.
It's faster than TestRun_RecordAutomated1 for large numbers of test runs.
*However* it does not refresh any of the other items in the project (test cases, requirements, test sets)
that also have summarized forms of this data. So once you're done loading data, you *must*
call the Project_RefreshProgressExecutionStatusCaches() command once.
How to Execute
To access this REST web service, you need to use the following URL (make sure to replace any parameters (e.g. {project_id}) with the relevant value (e.g. 1)):
https://api.inflectra.com/spira/services/v7_0/RestService.svc/projects/{project_id}/test-runs/record-multiple
Request Parameters
Name | Description
project_id | The id of the current project
Request Body
<ArrayOfRemoteAutomatedTestRun xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.datacontract.org/2004/07/Inflectra.SpiraTest.Web.Services.v7_0.DataObjects">
<RemoteAutomatedTestRun>
<ArtifactTypeId>0</ArtifactTypeId>
<ConcurrencyDate>0001-01-01T00:00:00</ConcurrencyDate>
<CustomProperties i:nil="true" />
<Guid i:nil="true" />
<IsAttachments>false</IsAttachments>
<ProjectGuid i:nil="true" />
<ProjectId>0</ProjectId>
<Tags i:nil="true" />
<ActualDuration i:nil="true" />
<BuildId i:nil="true" />
<EndDate i:nil="true" />
<EstimatedDuration i:nil="true" />
<ExecutionStatusId>0</ExecutionStatusId>
<Name i:nil="true" />
<ReleaseGuid i:nil="true" />
<ReleaseId i:nil="true" />
<ReleaseVersionNumber i:nil="true" />
<StartDate>0001-01-01T00:00:00</StartDate>
<TestCaseGuid i:nil="true" />
<TestCaseId>0</TestCaseId>
<TestConfigurationId i:nil="true" />
<TestRunId i:nil="true" />
<TestRunTypeId>0</TestRunTypeId>
<TestSetGuid i:nil="true" />
<TestSetId i:nil="true" />
<TestSetTestCaseId i:nil="true" />
<TesterGuid i:nil="true" />
<TesterId i:nil="true" />
<AutomationAttachmentId i:nil="true" />
<AutomationEngineId i:nil="true" />
<AutomationEngineToken i:nil="true" />
<AutomationHostId i:nil="true" />
<Parameters i:nil="true" />
<RunnerAssertCount i:nil="true" />
<RunnerMessage i:nil="true" />
<RunnerName i:nil="true" />
<RunnerStackTrace i:nil="true" />
<RunnerTestName i:nil="true" />
<ScheduledDate i:nil="true" />
<TestRunFormatId>0</TestRunFormatId>
<TestRunSteps i:nil="true" />
</RemoteAutomatedTestRun>
</ArrayOfRemoteAutomatedTestRun>
[{"TestRunFormatId":0,
"RunnerName":null,
"RunnerTestName":null,
"RunnerAssertCount":null,
"RunnerMessage":null,
"RunnerStackTrace":null,
"AutomationHostId":null,
"AutomationEngineId":null,
"AutomationEngineToken":null,
"AutomationAttachmentId":null,
"Parameters":null,
"ScheduledDate":null,
"TestRunSteps":null,
"TestRunId":null,
"Name":null,
"TestCaseId":0,
"TestCaseGuid":null,
"TestRunTypeId":0,
"TesterId":null,
"TesterGuid":null,
"ExecutionStatusId":0,
"ReleaseId":null,
"ReleaseGuid":null,
"TestSetId":null,
"TestSetGuid":null,
"TestSetTestCaseId":null,
"StartDate":"0001-01-01T00:00:00",
"EndDate":null,
"BuildId":null,
"EstimatedDuration":null,
"ActualDuration":null,
"TestConfigurationId":null,
"ReleaseVersionNumber":null,
"ProjectId":0,
"ProjectGuid":null,
"ArtifactTypeId":0,
"ConcurrencyDate":"0001-01-01T00:00:00",
"CustomProperties":null,
"IsAttachments":false,
"Tags":null,
"Guid":null}]
Return Data
The JSON and XML examples below show the shape of one entry that will be returned. They do not show an example of how that entry will be populated.
<ArrayOfRemoteAutomatedTestRun xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.datacontract.org/2004/07/Inflectra.SpiraTest.Web.Services.v7_0.DataObjects">
<RemoteAutomatedTestRun>
<ArtifactTypeId>0</ArtifactTypeId>
<ConcurrencyDate>0001-01-01T00:00:00</ConcurrencyDate>
<CustomProperties i:nil="true" />
<Guid i:nil="true" />
<IsAttachments>false</IsAttachments>
<ProjectGuid i:nil="true" />
<ProjectId>0</ProjectId>
<Tags i:nil="true" />
<ActualDuration i:nil="true" />
<BuildId i:nil="true" />
<EndDate i:nil="true" />
<EstimatedDuration i:nil="true" />
<ExecutionStatusId>0</ExecutionStatusId>
<Name i:nil="true" />
<ReleaseGuid i:nil="true" />
<ReleaseId i:nil="true" />
<ReleaseVersionNumber i:nil="true" />
<StartDate>0001-01-01T00:00:00</StartDate>
<TestCaseGuid i:nil="true" />
<TestCaseId>0</TestCaseId>
<TestConfigurationId i:nil="true" />
<TestRunId i:nil="true" />
<TestRunTypeId>0</TestRunTypeId>
<TestSetGuid i:nil="true" />
<TestSetId i:nil="true" />
<TestSetTestCaseId i:nil="true" />
<TesterGuid i:nil="true" />
<TesterId i:nil="true" />
<AutomationAttachmentId i:nil="true" />
<AutomationEngineId i:nil="true" />
<AutomationEngineToken i:nil="true" />
<AutomationHostId i:nil="true" />
<Parameters i:nil="true" />
<RunnerAssertCount i:nil="true" />
<RunnerMessage i:nil="true" />
<RunnerName i:nil="true" />
<RunnerStackTrace i:nil="true" />
<RunnerTestName i:nil="true" />
<ScheduledDate i:nil="true" />
<TestRunFormatId>0</TestRunFormatId>
<TestRunSteps i:nil="true" />
</RemoteAutomatedTestRun>
</ArrayOfRemoteAutomatedTestRun>
[{"TestRunFormatId":0,
"RunnerName":null,
"RunnerTestName":null,
"RunnerAssertCount":null,
"RunnerMessage":null,
"RunnerStackTrace":null,
"AutomationHostId":null,
"AutomationEngineId":null,
"AutomationEngineToken":null,
"AutomationAttachmentId":null,
"Parameters":null,
"ScheduledDate":null,
"TestRunSteps":null,
"TestRunId":null,
"Name":null,
"TestCaseId":0,
"TestCaseGuid":null,
"TestRunTypeId":0,
"TesterId":null,
"TesterGuid":null,
"ExecutionStatusId":0,
"ReleaseId":null,
"ReleaseGuid":null,
"TestSetId":null,
"TestSetGuid":null,
"TestSetTestCaseId":null,
"StartDate":"0001-01-01T00:00:00",
"EndDate":null,
"BuildId":null,
"EstimatedDuration":null,
"ActualDuration":null,
"TestConfigurationId":null,
"ReleaseVersionNumber":null,
"ProjectId":0,
"ProjectGuid":null,
"ArtifactTypeId":0,
"ConcurrencyDate":"0001-01-01T00:00:00",
"CustomProperties":null,
"IsAttachments":false,
"Tags":null,
"Guid":null}]