add markers to pytest.ini; add additional example tests; add proper README
This commit is contained in:
parent cc95e12d20
commit c22079068d
README.md (118 changed lines)
@@ -2,3 +2,121 @@
This is a proof-of-concept project showing how PyTest can be combined with a custom API client to quickly and easily build API tests for a data-delivery service that is already deployed and active on an existing environment.

## Instructions

1. Add a `.env` file to your local copy of the repo. By default, this demo project will look for `.env.qa` in the root of the project. You can find a template to base this on in `apiclient/env_template`; a hypothetical example of the file's shape is sketched below. Info on how to set the values in that file can be found by asking Greg.
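
   The variable names below are placeholders only, meant to illustrate the shape of the file; the real keys come from `apiclient/env_template`:

```shell
# .env.qa -- hypothetical example; copy the actual keys from apiclient/env_template
API_URL=https://qa.example.com/api
OAUTH_USERNAME=<service-account>
OAUTH_PASSWORD=<secret>
```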

2. Create your virtual env:

```shell
python3 -m venv venv
```

3. Activate the environment:

```shell
source venv/bin/activate
```

On Windows:

```shell
.\venv\Scripts\activate
```

4. Install requirements (pip will come from your venv):

```shell
pip install -r requirements.txt
```

Once this is done, executing the tests is just a matter of invoking pytest:

```
(.venv) PS C:\Users\GregGauthier\Projects\local\pytest-api> pytest
=============================== test session starts ===============================
platform win32 -- Python 3.12.4, pytest-8.2.2, pluggy-1.5.0
rootdir: C:\Users\GregGauthier\Projects\local\pytest-api
configfile: pytest.ini
collected 5 items

tests\test_datadelivery.py .....                                             [100%]

================================ warnings summary =================================
tests/test_datadelivery.py::test_datadelivery_role_get
tests/test_datadelivery.py::test_datadelivery_acms_redaction_get
tests/test_datadelivery.py::test_datadelivery_client_applications_get
tests/test_datadelivery.py::test_datadelivery_endpoint_get
tests/test_datadelivery.py::test_datadelivery_redaction_type_get
  C:\Users\GregGauthier\Projects\local\pytest-api\.venv\Lib\site-packages\urllib3\connectionpool.py:1099: InsecureRequestWarning: Unverified HTTPS request is being made to host 'api-spectrumqa.teledynecontrols.com'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings
    warnings.warn(

-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
===================================== PASSES ======================================
============================== slowest 25 durations ===============================
4.41s call     tests/test_datadelivery.py::test_datadelivery_role_get
2.16s call     tests/test_datadelivery.py::test_datadelivery_acms_redaction_get
1.98s call     tests/test_datadelivery.py::test_datadelivery_redaction_type_get
1.79s call     tests/test_datadelivery.py::test_datadelivery_client_applications_get
1.79s call     tests/test_datadelivery.py::test_datadelivery_endpoint_get
1.04s setup    tests/test_datadelivery.py::test_datadelivery_role_get
1.01s setup    tests/test_datadelivery.py::test_datadelivery_acms_redaction_get
1.00s setup    tests/test_datadelivery.py::test_datadelivery_endpoint_get
0.99s setup    tests/test_datadelivery.py::test_datadelivery_client_applications_get
0.97s setup    tests/test_datadelivery.py::test_datadelivery_redaction_type_get

(5 durations < 0.005s hidden.  Use -vv to show these durations.)
============================= short test summary info =============================
PASSED tests/test_datadelivery.py::test_datadelivery_role_get
PASSED tests/test_datadelivery.py::test_datadelivery_acms_redaction_get
PASSED tests/test_datadelivery.py::test_datadelivery_client_applications_get
PASSED tests/test_datadelivery.py::test_datadelivery_endpoint_get
PASSED tests/test_datadelivery.py::test_datadelivery_redaction_type_get
========================= 5 passed, 5 warnings in 17.20s ==========================
```

PyTest is configured to show the top 25 test durations, as well as the pass/fail status of all the tests. The tests have been marked with various tags to allow for granular test selection. To see all the available markers:

```
(.venv) PS C:\Users\GregGauthier\Projects\local\pytest-api> pytest --markers
@pytest.mark.get: marks a test as a get request test (deselect with '-m "not get"')

@pytest.mark.role: marks a test as a role test (deselect with '-m "not role"')

@pytest.mark.endpoint: marks a test as an endpoint test (deselect with '-m "not endpoint"')

@pytest.mark.redaction: marks a test as a redaction test (deselect with '-m "not redaction"')

@pytest.mark.client_application: marks a test as a client application test (deselect with '-m "not client_application"')

. . . (etc) . . .
```

To run tests with a specific marker:

```
(.venv) PS C:\Users\GregGauthier\Projects\local\pytest-api> pytest -m "redaction"
=============================== test session starts ===============================
platform win32 -- Python 3.12.4, pytest-8.2.2, pluggy-1.5.0
rootdir: C:\Users\GregGauthier\Projects\local\pytest-api
configfile: pytest.ini
collected 5 items / 3 deselected / 2 selected

tests\test_datadelivery.py ..                                                [100%]

================================ warnings summary =================================
tests/test_datadelivery.py::test_datadelivery_acms_redaction_get
tests/test_datadelivery.py::test_datadelivery_redaction_type_get
  C:\Users\GregGauthier\Projects\local\pytest-api\.venv\Lib\site-packages\urllib3\connectionpool.py:1099: InsecureRequestWarning: Unverified HTTPS request is being made to host 'api-spectrumqa.teledynecontrols.com'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings
    warnings.warn(

-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
===================================== PASSES ======================================
============================== slowest 25 durations ===============================
2.32s call     tests/test_datadelivery.py::test_datadelivery_acms_redaction_get
1.96s call     tests/test_datadelivery.py::test_datadelivery_redaction_type_get
1.09s setup    tests/test_datadelivery.py::test_datadelivery_acms_redaction_get
0.99s setup    tests/test_datadelivery.py::test_datadelivery_redaction_type_get

(2 durations < 0.005s hidden.  Use -vv to show these durations.)
============================= short test summary info =============================
PASSED tests/test_datadelivery.py::test_datadelivery_acms_redaction_get
PASSED tests/test_datadelivery.py::test_datadelivery_redaction_type_get
=================== 2 passed, 3 deselected, 2 warnings in 6.38s ===================
(.venv) PS C:\Users\GregGauthier\Projects\local\pytest-api>
```
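
Markers can also be combined using pytest's standard `-m` expression syntax, for example:

```shell
# run every GET test except the redaction ones
pytest -m "get and not redaction"

# run the redaction tests and the role tests together
pytest -m "redaction or role"
```
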
That's it!

pytest.ini (10 changed lines)
@@ -1,2 +1,10 @@
 [pytest]
-addopts = --durations=100 -rA
+# show the top 25 durations, and show the pass/fail:
+addopts = --durations=25 -rA
+
+markers =
+    get: marks a test as a get request test (deselect with '-m "not get"')
+    role: marks a test as a role test (deselect with '-m "not role"')
+    endpoint: marks a test as an endpoint test (deselect with '-m "not endpoint"')
+    redaction: marks a test as a redaction test (deselect with '-m "not redaction"')
+    client_application: marks a test as a client application test (deselect with '-m "not client_application"')
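
Registering the markers here is what makes them appear in `pytest --markers` and avoids unknown-mark warnings. As an optional hardening step (not part of this commit), pytest's `--strict-markers` flag turns any unregistered marker into a collection error:

```ini
# hypothetical addition to pytest.ini -- fail fast on typo'd or unregistered markers
addopts = --durations=25 -rA --strict-markers
```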

tests/conftest.py (new file, 73 lines)
@@ -0,0 +1,73 @@
import pytest
from apiclient.config import get_cfg
from apiclient.oauth_helper import get_legacy_token


ENV = 'qa'  # This would be set in an actual OS env var on the execution platform
CFG = get_cfg(ENV)  # needed for the token, and the full api url


@pytest.fixture
def role_get_call():
    token = get_legacy_token(ENV)
    api_call = {
        "token": token,
        "method": "GET",
        "url": CFG["api_url"] + '/data-delivery/role',
        "headers": {'Content-Type': 'application/json'},
        "body": {"application_id": 1}
    }
    return api_call


@pytest.fixture
def acms_redaction_get_call():
    token = get_legacy_token(ENV)
    api_call = {
        "token": token,
        "method": "GET",
        "url": CFG["api_url"] + '/data-delivery/acmsredaction',
        "headers": {'Content-Type': 'application/json'},
        "body": {"redaction_id": 1}
    }
    return api_call


@pytest.fixture
def client_applications_get_call():
    token = get_legacy_token(ENV)
    api_call = {
        "token": token,
        "method": "GET",
        "url": CFG["api_url"] + '/data-delivery/client-applications',
        "headers": {'Content-Type': 'application/json'},
        "body": {}
    }
    return api_call

@pytest.fixture
def endpoint_get_call():
    token = get_legacy_token(ENV)
    api_call = {
        "token": token,
        "method": "GET",
        "url": CFG["api_url"] + '/data-delivery/endpoint',
        "headers": {'Content-Type': 'application/json'},
        "body": {
            "endpoint_id": 1001
        }
    }
    return api_call


@pytest.fixture
def redaction_type_get_call():
    token = get_legacy_token(ENV)
    api_call = {
        "token": token,
        "method": "GET",
        "url": CFG["api_url"] + '/data-delivery/redactiontype',
        "headers": {'Content-Type': 'application/json'},
        "body": {}
    }
    return api_call
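
The fixtures above only build a description of the request; the actual HTTP call is made by `apiclient.client.api_client`, which is not part of this commit. A minimal sketch of what such a client could look like, assuming it uses `requests`, a bearer-token scheme, and returns the decoded JSON body (the real implementation may differ):

```python
# Hypothetical sketch of apiclient/client.py -- the real module is not shown in this commit.
import requests


def api_client(api_call: dict):
    """Dispatch an api_call dict (as built by the fixtures in tests/conftest.py)
    and return the decoded JSON response body."""
    headers = dict(api_call.get("headers", {}))
    headers["Authorization"] = f"Bearer {api_call['token']}"  # assumes a bearer-token scheme
    response = requests.request(
        method=api_call["method"],
        url=api_call["url"],
        headers=headers,
        json=api_call.get("body") or None,  # the real client may send these as query params for GETs
        verify=False,  # matches the InsecureRequestWarning seen in the sample output above
        timeout=30,
    )
    response.raise_for_status()
    return response.json()
```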

tests/expected_responses/… (new file)
@@ -0,0 +1 @@
[]

tests/expected_responses/… (new file)
@@ -0,0 +1,22 @@
[
  {
    "description" : "RAW A717 Redaction Type",
    "name" : "RAWA717",
    "redactiontype_id" : 1
  },
  {
    "description" : "RAW A767 Redaction Type",
    "name" : "RAWA767",
    "redactiontype_id" : 2
  },
  {
    "description" : "CSV Redaction Type",
    "name" : "CSV",
    "redactiontype_id" : 3
  },
  {
    "description" : "PARQUET Redaction Type",
    "name" : "PARQUET",
    "redactiontype_id" : 4
  }
]
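
The tests below look these expected payloads up by test name via `tests.helpers.get_expected_response`, which is not shown in this commit. A minimal sketch of how such a helper could map a pytest node name onto one of the JSON files, assuming the files live in `tests/expected_responses/` and are named after the tests (both assumptions, not confirmed by this commit):

```python
# Hypothetical sketch of tests/helpers.py -- the real helper is not shown in this commit.
import json
from pathlib import Path

EXPECTED_RESPONSES_DIR = Path(__file__).parent / "expected_responses"


def get_expected_response(test_name: str):
    """Load the expected JSON payload for a pytest node name, e.g.
    'test_datadelivery_role_get' -> expected_responses/test_datadelivery_role_get.json."""
    with open(EXPECTED_RESPONSES_DIR / f"{test_name}.json", encoding="utf-8") as handle:
        return json.load(handle)
```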

tests/test_datadelivery.py
@@ -1,67 +1,50 @@
 import json
 import pytest
 from apiclient.client import api_client
-from apiclient.config import get_cfg
-from apiclient.oauth_helper import get_legacy_token
 from tests.helpers import get_expected_response


-ENV = 'qa'
-CFG = get_cfg(ENV)
-
-
-@pytest.fixture
-def role_get_call():
-    token = get_legacy_token(ENV)
-    api_call = {
-        "token": token,
-        "method": "GET",
-        "url": CFG["api_url"] + '/data-delivery/role',
-        "headers": {'Content-Type': 'application/json'},
-        "body": {"application_id": 1}
-    }
-    return api_call
-
-
-@pytest.fixture
-def acms_redaction_get_call():
-    token = get_legacy_token(ENV)
-    api_call = {
-        "token": token,
-        "method": "GET",
-        "url": CFG["api_url"] + '/data-delivery/acmsredaction',
-        "headers": {'Content-Type': 'application/json'},
-        "body": {"redaction_id": 1}
-    }
-    return api_call
-
-
-@pytest.fixture
-def client_applications_get_call():
-    token = get_legacy_token(ENV)
-    api_call = {
-        "token": token,
-        "method": "GET",
-        "url": CFG["api_url"] + '/data-delivery/client-applications',
-        "headers": {'Content-Type': 'application/json'},
-        "body": {}
-    }
-    return api_call
-
-
+####
+# NOTE:
+# * api call fixtures can be found in tests/conftest.py
+# * expected response fixtures can be found in tests/expected_responses
+####
+
+
+@pytest.mark.get
+@pytest.mark.role
 def test_datadelivery_role_get(request, role_get_call):
     expected_response = get_expected_response(request.node.name)
     actual_response = api_client(role_get_call)
     assert json.dumps(actual_response, indent=4) == json.dumps(expected_response, indent=4)


+@pytest.mark.get
+@pytest.mark.redaction
 def test_datadelivery_acms_redaction_get(request, acms_redaction_get_call):
     expected_response = get_expected_response(request.node.name)
     actual_response = api_client(acms_redaction_get_call)
     assert json.dumps(actual_response, indent=4) == json.dumps(expected_response, indent=4)


+@pytest.mark.get
+@pytest.mark.client_application
 def test_datadelivery_client_applications_get(request, client_applications_get_call):
     expected_response = get_expected_response(request.node.name)
     actual_response = api_client(client_applications_get_call)
     assert json.dumps(actual_response, indent=4) == json.dumps(expected_response, indent=4)
+
+
+@pytest.mark.get
+@pytest.mark.endpoint
+def test_datadelivery_endpoint_get(request, endpoint_get_call):
+    expected_response = get_expected_response(request.node.name)
+    actual_response = api_client(endpoint_get_call)
+    assert json.dumps(actual_response, indent=4) == json.dumps(expected_response, indent=4)
+
+
+@pytest.mark.get
+@pytest.mark.redaction
+def test_datadelivery_redaction_type_get(request, redaction_type_get_call):
+    expected_response = get_expected_response(request.node.name)
+    actual_response = api_client(redaction_type_get_call)
+    assert json.dumps(actual_response, indent=4) == json.dumps(expected_response, indent=4)