@@ -12,18 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import importlib
-import pathlib
 import os
-import sys
 import threading as td
 import time
 from concurrent import futures
 from http import HTTPStatus
 from typing import Any, List, Optional, Tuple, Union, Dict, Callable
 
 import cortex as cx
-import grpc
 import requests
 import yaml
 
@@ -130,45 +126,6 @@ def _runnable(job_id):
     return True
 
 
-def generate_grpc(
-    client: cx.Client, api_name: str, api_dir: pathlib.Path, config: Dict[str, Any]
-) -> Tuple[Any, Any, List, Any, bool]:
-    api_info = client.get_api(api_name)
-
-    test_proto_dir = pathlib.Path(config["proto_module_pb2"]).parent
-    sys.path.append(str(api_dir / test_proto_dir))
-    proto_module_pb2 = importlib.import_module(str(pathlib.Path(config["proto_module_pb2"]).stem))
-    proto_module_pb2_grpc = importlib.import_module(
-        str(pathlib.Path(config["proto_module_pb2_grpc"]).stem)
-    )
-    sys.path.pop()
-
-    endpoint = api_info["endpoint"] + ":" + str(api_info["grpc_ports"]["insecure"])
-    channel = grpc.insecure_channel(endpoint)
-    stub = getattr(proto_module_pb2_grpc, config["stub_service_name"] + "Stub")(channel)
-
-    input_sample = getattr(proto_module_pb2, config["input_spec"]["class_name"])()
-    for k, v in config["input_spec"]["input"].items():
-        setattr(input_sample, k, v)
-
-    SampleClass = getattr(proto_module_pb2, config["output_spec"]["class_name"])
-    output_values = []
-    is_output_stream = config["output_spec"]["stream"]
-    if is_output_stream:
-        for entry in config["output_spec"]["output"]:
-            output_val = SampleClass()
-            for k, v in entry.items():
-                setattr(output_val, k, v)
-            output_values.append(output_val)
-    else:
-        output_val = SampleClass()
-        for k, v in config["output_spec"]["output"].items():
-            setattr(output_val, k, v)
-        output_values.append(output_val)
-
-    return stub, input_sample, output_values, SampleClass, is_output_stream
-
-
 def request_prediction(
     client: cx.Client,
     api_name: str,
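For context, the removed `generate_grpc` helper was driven entirely by a per-API test config. The sketch below reconstructs the shape of that config from the keys the helper reads (`proto_module_pb2`, `proto_module_pb2_grpc`, `stub_service_name`, `input_spec`, `output_spec`); the module paths, service name, and field values are illustrative assumptions, not taken from the repository.

```python
# Illustrative shape of the config dict consumed by the removed generate_grpc helper.
# The keys mirror the lookups in the deleted code; all concrete values are assumptions.
grpc_test_config = {
    # Paths (relative to the API directory) of the generated protobuf modules.
    "proto_module_pb2": "test_proto/classifier_pb2.py",
    "proto_module_pb2_grpc": "test_proto/classifier_pb2_grpc.py",
    # "<stub_service_name>Stub" is looked up in the *_pb2_grpc module.
    "stub_service_name": "Classifier",
    # Request message: class name in the *_pb2 module plus the fields to set on it.
    "input_spec": {
        "class_name": "Sample",
        "input": {"sepal_length": 5.2, "sepal_width": 3.6},
    },
    # Expected response(s): one message dict, or a list of them when "stream" is true.
    "output_spec": {
        "class_name": "Response",
        "stream": False,
        "output": {"classification": "setosa"},
    },
}

# The helper returned (stub, input_sample, output_values, SampleClass, is_output_stream);
# a test would then call a method on the stub (the method name comes from the generated
# service, e.g. stub.Predict(input_sample)) and compare the reply against output_values.
```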