Feat/oai endpoints mapper using JSON (#2929)
* feat: test mapper
* chore: temporarily add other runner
* chore: temporarily remove ubuntu-18-04-openai-api-collection-test
* chore: test json file
* Correct path endpoints_mapping.json
* feat: running via endpoints
* feat: running multiple endpoints
* feat: use endpoint value from workflow dispatch
* feat: add mapper between endpoint and python test file
* feat: config run all

Co-authored-by: Van-QA <van@jan.ai>
Co-authored-by: Hien To <tominhhien97@gmail.com>
commit ae499c7ac4 (parent 65b8d8e66b)
.github/workflows/jan-openai-api-test.yml (vendored, 21 changed lines)
@@ -1,6 +1,13 @@
 name: Test - OpenAI API Pytest collection
 on:
   workflow_dispatch:
+    inputs:
+      endpoints:
+        description: 'comma-separated list (see available at endpoints_mapping.json e.g. GET /users,POST /transform)'
+        required: false
+        default: all
+        type: string
+
   push:
     branches:
       - main
@@ -38,11 +45,11 @@ jobs:
           rm -rf ~/jan
           make clean

-      - name: install dependencies
+      - name: Install dependencies
         run: |
           npm install -g @stoplight/prism-cli

-      - name: create python virtual environment and run test
+      - name: Create python virtual environment and run test
         run: |
           python3 -m venv /tmp/jan
           source /tmp/jan/bin/activate
@@ -65,10 +72,14 @@ jobs:

           # Append to conftest.py
           cat ../docs/tests/conftest.py >> tests/conftest.py

+          cat ../docs/tests/endpoints_mapping.json >> tests/endpoints_mapping.json
+
           # start mock server and run test then stop mock server
-          prism mock ../docs/openapi/jan.yaml > prism.log & prism_pid=$! && pytest --reportportal --html=report.html && kill $prism_pid
+          prism mock ../docs/openapi/jan.yaml > prism.log & prism_pid=$! &&
+          pytest --endpoint "$ENDPOINTS" --reportportal --html=report.html && kill $prism_pid
           deactivate
+        env:
+          ENDPOINTS: ${{ github.event.inputs.endpoints }}

       - name: Upload Artifact
         uses: actions/upload-artifact@v2
@@ -79,7 +90,7 @@ jobs:
             openai-python/assets
             openai-python/prism.log

-      - name: clean up
+      - name: Clean up
         if: always()
         run: |
           rm -rf /tmp/jan
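With this change, a manual run of the workflow passes the `endpoints` dispatch input through the ENDPOINTS environment variable into `pytest --endpoint "$ENDPOINTS"`. On push-triggered runs the input is absent, so pytest receives an empty string, which the conftest.py hooks below treat the same as the default `all`. A minimal sketch of the string handling involved (plain Python, no project code assumed):

# How the --endpoint value is split by the conftest.py hooks shown below
print("".split(","))                    # [''] -> empty input, treated like "all" (run everything)
print("all".split(","))                 # ['all'] -> explicit default, also runs everything
print("/models,/threads".split(","))    # ['/models', '/threads'] -> only matching tests run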
docs/tests/conftest.py
@@ -1,6 +1,40 @@
import json

import pytest


def pytest_addoption(parser):
    # register the --endpoint command-line option; the default "all" runs every test
    parser.addoption(
        "--endpoint", action="store", default="all",
        help="comma-separated list of endpoints to test (default: all)"
    )


def pytest_configure(config):
    config.addinivalue_line(
        "markers", "endpoint(endpoint): this mark selects the test based on endpoint"
    )


def pytest_runtest_setup(item):
    # skip the test unless one of its endpoint markers matches a requested endpoint
    getoption = item.config.getoption("--endpoint").split(",")
    if getoption not in (["all"], [""]):
        endpoint_names = [mark.args[0] for mark in item.iter_markers(name="endpoint")]
        if not endpoint_names or not set(getoption).intersection(set(endpoint_names)):
            pytest.skip("Test skipped because endpoint is {!r}".format(endpoint_names))


def pytest_collection_modifyitems(items):
    # load the JSON file
    with open("tests/endpoints_mapping.json", "r") as json_file:
        endpoints_file_mapping = json.load(json_file)

    # create a dictionary to map filenames to endpoints
    filename_to_endpoint = {}
    for endpoint, files in endpoints_file_mapping.items():
        for filename in files:
            filename_to_endpoint[filename] = endpoint

    # add the markers based on the JSON file
    for item in items:
        # add the name of the file (without extension) as a marker
        filename = item.nodeid.split("::")[0].split("/")[-1].replace(".py", "")
        marker = pytest.mark.file(filename)
        item.add_marker(marker)
        # map the name of the file to endpoint, else use default value
        filename = item.fspath.basename
        marker = filename_to_endpoint.get(filename, filename)
        item.add_marker(pytest.mark.endpoint(marker, filename=filename))
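The per-test decision made by pytest_runtest_setup boils down to a set intersection between the requested endpoints and the endpoint markers attached during collection. A small self-contained sketch (the values below are illustrative, not taken from a real run):

# Illustrative values, mirroring the logic of pytest_runtest_setup above
requested = "/chat/completions,/models".split(",")   # from --endpoint
marked = ["/chat/completions"]                        # endpoint marker(s) on one test item
run_all = requested in (["all"], [""])
should_run = run_all or bool(set(requested) & set(marked))
print(should_run)  # True: the item's marker matches a requested endpoint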
docs/tests/endpoints_mapping.json (new file, 75 lines)
@@ -0,0 +1,75 @@
{
    "/embeddings": [
        "test_embedding.py"
    ],
    "/audio/translations": [
        "test_translations.py"
    ],
    "/audio/transcriptions": [
        "test_transcriptions.py"
    ],
    "/moderations": [
        "test_moderations.py"
    ],
    "/images/generations": [
        "test_images.py"
    ],
    "/batches": [
        "test_batches.py"
    ],
    "/vector_stores": [
        "test_vector_stores.py"
    ],
    "/fine_tuning/jobs": [
        "test_jobs.py",
        "test_checkpoints.py"
    ],
    "/assistants": [
        "test_assistants.py"
    ],
    "/threads/{thread_id}/runs": [
        "test_runs.py"
    ],
    "/threads/{thread_id}/runs/{run_id}/steps": [
        "test_steps.py"
    ],
    "/vector_stores/{vector_store_id}/file_batches": [
        "test_file_batches.py"
    ],
    "/messages": [
        "test_messages.py"
    ],
    "/vector_stores/{vector_store_id}/files": [
        "test_files.py"
    ],
    "/chat/completions": [
        "test_completions.py"
    ],
    "/threads": [
        "test_threads.py"
    ],
    "/audio/speech": [
        "test_speech.py"
    ],
    "/models": [
        "test_models.py"
    ],
    "native_client_sdk_only": [
        "test_streaming.py"
    ],
    "utils": [
        "test_response.py",
        "test_client.py",
        "test_extract_files.py",
        "test_typing.py",
        "test_legacy_response.py",
        "test_module_client.py",
        "test_old_api.py",
        "test_proxy.py",
        "test_qs.py",
        "test_required_args.py",
        "test_transform.py",
        "test_azure.py",
        "test_deepcopy.py"
    ]
}
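The keys in this file (for example /chat/completions or /fine_tuning/jobs) are the values the `endpoints` workflow input expects; each key lists the openai-python test files it selects. A hedged sketch of how one could preview that selection locally, assuming it is run from the repository root (selected_files is a hypothetical helper, not part of the commit):

import json

def selected_files(endpoint_option, mapping_path="docs/tests/endpoints_mapping.json"):
    # Resolve a comma-separated --endpoint value to the test files it would keep.
    with open(mapping_path) as fh:
        mapping = json.load(fh)
    wanted = endpoint_option.split(",")
    if wanted in (["all"], [""]):
        return sorted(f for files in mapping.values() for f in files)
    return sorted(f for ep, files in mapping.items() if ep in wanted for f in files)

print(selected_files("/fine_tuning/jobs"))  # ['test_checkpoints.py', 'test_jobs.py']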