@@ -18,9 +18,6 @@ def clip_model_name() -> str:
     return "RN50"
 
 
-@pytest.mark.skip(
-    reason="Skipping clip reference model test because openai weights hosted on azure are not available"
-)
 @pytest.fixture(scope="module")
 def baseline_clip_model(clip_model_name: str):
     original_clip_dir = os.path.join(ASSETS_DIR, "original_clip")
@@ -81,9 +78,6 @@ def clip_onnx_wrapper_small_batch(clip_model_name: str) -> AutoModel:
     return _get_clip_onnx_wrapper(clip_model_name=clip_model_name, max_batch_size=2)
 
 
-@pytest.mark.skip(
-    reason="Skipping clip reference model test because openai weights hosted on azure are not available"
-)
 def _test_clip_wrapper_vs_baseline_for_image_embeddings(
     clip_wrapper,
     baseline_clip_model,
@@ -110,9 +104,6 @@ def _test_clip_wrapper_vs_baseline_for_image_embeddings(
     assert similarity.item() > 0.99
 
 
-@pytest.mark.skip(
-    reason="Skipping clip reference model test because openai weights hosted on azure are not available"
-)
 @pytest.mark.e2e_model_inference
 @pytest.mark.parametrize("image_shape", [(224, 224), (320, 240), (448, 448)])
 def test_torch_clip_wrapper_vs_baseline_for_image_embeddings(
@@ -127,9 +118,6 @@ def test_torch_clip_wrapper_vs_baseline_for_image_embeddings(
     )
 
 
-@pytest.mark.skip(
-    reason="Skipping clip reference model test because openai weights hosted on azure are not available"
-)
 @pytest.mark.onnx_extras
 @pytest.mark.e2e_model_inference
 @pytest.mark.parametrize("image_shape", [(224, 224), (320, 240), (448, 448)])
@@ -145,9 +133,6 @@ def test_onnx_clip_wrapper_vs_baseline_for_image_embeddings(
     )
 
 
-@pytest.mark.skip(
-    reason="Skipping clip reference model test because openai weights hosted on azure are not available"
-)
 def _test_clip_wrapper_vs_baseline_for_text_embeddings(
     clip_wrapper,
     baseline_clip_model,
@@ -168,9 +153,6 @@ def _test_clip_wrapper_vs_baseline_for_text_embeddings(
     assert similarity.item() > 0.999
 
 
-@pytest.mark.skip(
-    reason="Skipping clip reference model test because openai weights hosted on azure are not available"
-)
 @pytest.mark.e2e_model_inference
 def test_torch_clip_wrapper_vs_baseline_for_text_embeddings(
     clip_torch_wrapper: AutoModel,
@@ -181,9 +163,6 @@ def test_torch_clip_wrapper_vs_baseline_for_text_embeddings(
     )
 
 
-@pytest.mark.skip(
-    reason="Skipping clip reference model test because openai weights hosted on azure are not available"
-)
 @pytest.mark.onnx_extras
 @pytest.mark.e2e_model_inference
 def test_onnx_clip_wrapper_vs_baseline_for_text_embeddings(