Commit dee4354

Cleanup (#2264)
1 parent: eda4b96

2 files changed: 8 additions, 72 deletions

test/e2e/e2e/tests.py

Lines changed: 8 additions & 29 deletions
@@ -41,7 +41,6 @@
     wait_on_futures,
     endpoint_ready,
     request_prediction,
-    generate_grpc,
     job_done,
     jobs_done,
     request_batch_prediction,
@@ -88,36 +87,16 @@ def test_realtime_api(
             client=client, api_names=[api_name], timeout=timeout
         ), f"apis {api_name} not ready"

-        if not expectations or "grpc" not in expectations:
-            with open(str(api_dir / "sample.json")) as f:
-                payload = json.load(f)
-            response = request_prediction(client, api_name, payload, extra_path)
-
-            assert (
-                response.status_code == HTTPStatus.OK
-            ), f"status code: got {response.status_code}, expected {HTTPStatus.OK}"
+        with open(str(api_dir / "sample.json")) as f:
+            payload = json.load(f)
+        response = request_prediction(client, api_name, payload, extra_path)

-            if expectations and "response" in expectations:
-                assert_response_expectations(response, expectations["response"])
+        assert (
+            response.status_code == HTTPStatus.OK
+        ), f"status code: got {response.status_code}, expected {HTTPStatus.OK}"

-        if expectations and "grpc" in expectations:
-            stub, input_sample, output_values, output_type, is_output_stream = generate_grpc(
-                client, api_name, api_dir, expectations["grpc"]
-            )
-            if is_output_stream:
-                for response, output_val in zip(stub.Predict(input_sample), output_values):
-                    assert (
-                        type(response) == output_type
-                    ), f"didn't receive response of type {str(output_type)}, but received {str(type(response))}"
-                    assert response == output_val, f"received {response} instead of {output_val}"
-            else:
-                response = stub.Predict(input_sample)
-                assert (
-                    type(stub.Predict(input_sample)) == output_type
-                ), f"didn't receive response of type {str(output_type)}, but received {str(type(response))}"
-                assert (
-                    response == output_values[0]
-                ), f"received {response} instead of {output_values[0]}"
+        if expectations and "response" in expectations:
+            assert_response_expectations(response, expectations["response"])
     except:
         # best effort
         try:
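
After this cleanup, test_realtime_api always exercises the HTTP path. As a rough standalone sketch of the retained flow (assuming request_prediction ultimately issues an HTTP POST against the deployed API's endpoint; the fixture directory and endpoint URL below are made up for illustration):

# Sketch of the HTTP-only flow kept by this commit; not the actual test code.
# api_dir and endpoint are hypothetical: in the real test they come from the
# test fixtures and the cortex client.
import json
from http import HTTPStatus
from pathlib import Path

import requests

api_dir = Path("tests/apis/realtime/example")   # hypothetical fixture directory
endpoint = "http://localhost:8888/example-api"  # hypothetical realtime API endpoint

with open(api_dir / "sample.json") as f:
    payload = json.load(f)

# request_prediction() presumably wraps something equivalent to this POST
response = requests.post(endpoint, json=payload)

assert (
    response.status_code == HTTPStatus.OK
), f"status code: got {response.status_code}, expected {HTTPStatus.OK}"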

test/e2e/e2e/utils.py

Lines changed: 0 additions & 43 deletions
@@ -12,18 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import importlib
-import pathlib
 import os
-import sys
 import threading as td
 import time
 from concurrent import futures
 from http import HTTPStatus
 from typing import Any, List, Optional, Tuple, Union, Dict, Callable

 import cortex as cx
-import grpc
 import requests
 import yaml

@@ -130,45 +126,6 @@ def _runnable(job_id):
     return True


-def generate_grpc(
-    client: cx.Client, api_name: str, api_dir: pathlib.Path, config: Dict[str, Any]
-) -> Tuple[Any, Any, List, Any, bool]:
-    api_info = client.get_api(api_name)
-
-    test_proto_dir = pathlib.Path(config["proto_module_pb2"]).parent
-    sys.path.append(str(api_dir / test_proto_dir))
-    proto_module_pb2 = importlib.import_module(str(pathlib.Path(config["proto_module_pb2"]).stem))
-    proto_module_pb2_grpc = importlib.import_module(
-        str(pathlib.Path(config["proto_module_pb2_grpc"]).stem)
-    )
-    sys.path.pop()
-
-    endpoint = api_info["endpoint"] + ":" + str(api_info["grpc_ports"]["insecure"])
-    channel = grpc.insecure_channel(endpoint)
-    stub = getattr(proto_module_pb2_grpc, config["stub_service_name"] + "Stub")(channel)
-
-    input_sample = getattr(proto_module_pb2, config["input_spec"]["class_name"])()
-    for k, v in config["input_spec"]["input"].items():
-        setattr(input_sample, k, v)
-
-    SampleClass = getattr(proto_module_pb2, config["output_spec"]["class_name"])
-    output_values = []
-    is_output_stream = config["output_spec"]["stream"]
-    if is_output_stream:
-        for entry in config["output_spec"]["output"]:
-            output_val = SampleClass()
-            for k, v in entry.items():
-                setattr(output_val, k, v)
-            output_values.append(output_val)
-    else:
-        output_val = SampleClass()
-        for k, v in config["output_spec"]["output"].items():
-            setattr(output_val, k, v)
-        output_values.append(output_val)
-
-    return stub, input_sample, output_values, SampleClass, is_output_stream
-
-
 def request_prediction(
     client: cx.Client,
     api_name: str,
