@@ -102,7 +102,18 @@ steps:
   commands:
   - pytest -v -s core
 
-- label: Entrypoints Test (LLM) # 30min
+- label: Entrypoints Unit Tests # 5min
+  timeout_in_minutes: 10
+  working_dir: "/vllm-workspace/tests"
+  fast_check: true
+  source_file_dependencies:
+  - vllm/entrypoints
+  - tests/entrypoints/
+  commands:
+  - pytest -v -s entrypoints/openai/tool_parsers
+  - pytest -v -s entrypoints/ --ignore=entrypoints/llm --ignore=entrypoints/openai --ignore=entrypoints/offline_mode --ignore=entrypoints/test_chat_utils.py
+
+- label: Entrypoints Integration Test (LLM) # 30min
   timeout_in_minutes: 40
   mirror_hardwares: [amdexperimental]
   working_dir: "/vllm-workspace/tests"
@@ -119,7 +130,7 @@ steps:
   - pytest -v -s entrypoints/llm/test_generate.py # it needs a clean process
   - VLLM_USE_V1=0 pytest -v -s entrypoints/offline_mode # Needs to avoid interference with other tests
 
-- label: Entrypoints Test (API Server) # 100min
+- label: Entrypoints Integration Test (API Server) # 100min
   timeout_in_minutes: 130
   mirror_hardwares: [amdexperimental]
   working_dir: "/vllm-workspace/tests"
@@ -132,7 +143,7 @@ steps:
   commands:
   - export VLLM_WORKER_MULTIPROC_METHOD=spawn
   - PYTHONPATH=/vllm-workspace pytest -v -s entrypoints/openai/test_collective_rpc.py # PYTHONPATH is needed to import custom Worker extension
-  - pytest -v -s entrypoints/openai --ignore=entrypoints/openai/test_chat_with_tool_reasoning.py --ignore=entrypoints/openai/test_oot_registration.py --ignore=entrypoints/openai/test_tensorizer_entrypoint.py --ignore=entrypoints/openai/correctness/ --ignore=entrypoints/openai/test_collective_rpc.py
+  - pytest -v -s entrypoints/openai --ignore=entrypoints/openai/test_chat_with_tool_reasoning.py --ignore=entrypoints/openai/test_oot_registration.py --ignore=entrypoints/openai/test_tensorizer_entrypoint.py --ignore=entrypoints/openai/correctness/ --ignore=entrypoints/openai/test_collective_rpc.py --ignore=entrypoints/openai/tool_parsers/
   - pytest -v -s entrypoints/test_chat_utils.py
 
 - label: Distributed Tests (4 GPUs) # 35min
@@ -823,7 +834,7 @@ steps:
   # begin io_processor plugins test, all the code in between uses the prithvi_io_processor plugin
   - pip install -e ./plugins/prithvi_io_processor_plugin
   - pytest -v -s plugins_tests/test_io_processor_plugins.py
-  - pip uninstall prithvi_io_processor_plugin -y
+  - pip uninstall prithvi_io_processor_plugin -y
   # end io_processor plugins test
   # other tests continue here:
   - pytest -v -s plugins_tests/test_scheduler_plugins.py
@@ -871,7 +882,7 @@ steps:
   timeout_in_minutes: 45
   mirror_hardwares: [amdexperimental]
   working_dir: "/vllm-workspace/tests"
-  num_gpus: 2
+  num_gpus: 2
   optional: true
   source_file_dependencies:
   - vllm/