
Commit d4ee4fa

Async proxy pool, Event manager, Custom access log, Expose loop to plugins (#645)
* Async proxy pool
* Async proxy pool
* Late upstream initialization and exception guards
* Close upstream proxy connection on client connection close
* Refactor into EventManager
* Fix tests accounting in the event manager
* Ensure each process initializes logger
* pragma no cover
* Teardown connection when proxy pool upstream proxy closes
* Add ability to customize access log format and add additional context to it
* Maintain total size for response bytes in access logs
* Fix tests broken due to new plugin methods missing mock
* Update pubsub_eventing to use EventManager to avoid entire bootstrapping step
1 parent 567d616 commit d4ee4fa

23 files changed: 518 additions, 262 deletions

examples/pubsub_eventing.py

Lines changed: 10 additions & 21 deletions
@@ -9,22 +9,17 @@
     :license: BSD, see LICENSE for more details.
 """
 import time
-import threading
 import multiprocessing
 import logging
 
 from typing import Dict, Any
 
-from proxy.core.event import EventQueue, EventSubscriber, EventDispatcher, eventNames
+from proxy.core.event import EventQueue, EventSubscriber, eventNames
+from proxy.core.event.manager import EventManager
 
 # Enable debug logging to view core event logs
 logging.basicConfig(level=logging.DEBUG)
 
-# Eventing requires a multiprocess safe queue
-# so that events can be safely published and received
-# between processes.
-manager = multiprocessing.Manager()
-
 main_publisher_request_id = '1234'
 process_publisher_request_id = '12345'
 num_events_received = [0, 0]
@@ -59,17 +54,13 @@ def on_event(payload: Dict[str, Any]) -> None:
 if __name__ == '__main__':
     start_time = time.time()
 
-    # Start dispatcher thread
-    dispatcher_queue = EventQueue(manager.Queue())
-    dispatcher_shutdown_event = threading.Event()
-    dispatcher = EventDispatcher(
-        shutdown=dispatcher_shutdown_event,
-        event_queue=dispatcher_queue)
-    dispatcher_thread = threading.Thread(target=dispatcher.run)
-    dispatcher_thread.start()
+    # Start dispatcher thread using EventManager
+    event_manager = EventManager()
+    event_manager.start_event_dispatcher()
+    assert event_manager.event_queue
 
     # Create a subscriber
-    subscriber = EventSubscriber(dispatcher_queue)
+    subscriber = EventSubscriber(event_manager.event_queue)
     # Internally, subscribe will start a separate thread
     # to receive incoming published messages
     subscriber.subscribe(on_event)
@@ -79,13 +70,13 @@ def on_event(payload: Dict[str, Any]) -> None:
     publisher_shutdown_event = multiprocessing.Event()
     publisher = multiprocessing.Process(
         target=publisher_process, args=(
-            publisher_shutdown_event, dispatcher_queue, ))
+            publisher_shutdown_event, event_manager.event_queue, ))
     publisher.start()
 
     try:
         while True:
             # Dispatch event from main process
-            dispatcher_queue.publish(
+            event_manager.event_queue.publish(
                 request_id=main_publisher_request_id,
                 event_name=eventNames.WORK_STARTED,
                 event_payload={'time': time.time()},
@@ -100,8 +91,6 @@ def on_event(payload: Dict[str, Any]) -> None:
         # Stop subscriber thread
         subscriber.unsubscribe()
         # Signal dispatcher to shutdown
-        dispatcher_shutdown_event.set()
-        # Wait for dispatcher shutdown
-        dispatcher_thread.join()
+        event_manager.stop_event_dispatcher()
     print('Received {0} events from main thread, {1} events from another process, in {2} seconds'.format(
         num_events_received[0], num_events_received[1], time.time() - start_time))
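
For context, the boilerplate removed above is roughly what EventManager now takes care of. A minimal sketch, assuming its internals mirror the deleted code; the class and method bodies below are illustrative, not the actual implementation:

# Illustrative sketch only: what EventManager.start_event_dispatcher() is
# assumed to wrap, reconstructed from the boilerplate deleted above.
import threading
import multiprocessing

from proxy.core.event import EventQueue, EventDispatcher


class EventManagerSketch:
    def __init__(self) -> None:
        # Eventing requires a multiprocess safe queue so that events can be
        # safely published and received between processes.
        self.manager = multiprocessing.Manager()
        self.event_queue = EventQueue(self.manager.Queue())
        self.shutdown = threading.Event()
        self.dispatcher = EventDispatcher(
            shutdown=self.shutdown,
            event_queue=self.event_queue)
        self.thread = threading.Thread(target=self.dispatcher.run)

    def start_event_dispatcher(self) -> None:
        self.thread.start()

    def stop_event_dispatcher(self) -> None:
        self.shutdown.set()
        self.thread.join()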

proxy/common/constants.py

Lines changed: 8 additions & 1 deletion
@@ -60,8 +60,15 @@
 DEFAULT_IPV6_HOSTNAME = ipaddress.IPv6Address('::1')
 DEFAULT_KEY_FILE = None
 DEFAULT_LOG_FILE = None
-DEFAULT_LOG_FORMAT = '%(asctime)s - pid:%(process)d [%(levelname)-.1s] %(funcName)s:%(lineno)d - %(message)s'
+DEFAULT_LOG_FORMAT = '%(asctime)s - pid:%(process)d [%(levelname)-.1s] %(filename)s:%(funcName)s:%(lineno)d - %(message)s'
 DEFAULT_LOG_LEVEL = 'INFO'
+DEFAULT_HTTP_ACCESS_LOG_FORMAT = '{client_ip}:{client_port} - ' + \
+    '{request_method} {server_host}:{server_port}{request_path} - ' + \
+    '{response_code} {response_reason} - {response_bytes} bytes - ' + \
+    '{connection_time_ms} ms'
+DEFAULT_HTTPS_ACCESS_LOG_FORMAT = '{client_ip}:{client_port} - ' + \
+    '{request_method} {server_host}:{server_port} - ' + \
+    '{response_bytes} bytes - {connection_time_ms} ms'
 DEFAULT_NUM_WORKERS = 0
 DEFAULT_OPEN_FILE_LIMIT = 1024
 DEFAULT_PAC_FILE = None
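
The new access log templates are plain str.format() strings, so a handler can render them from a context dictionary whose keys match the placeholders. A minimal sketch; the sample values are made up, and the exact context keys supplied by the HTTP handler are assumptions based on the placeholder names above:

# Hypothetical rendering of DEFAULT_HTTP_ACCESS_LOG_FORMAT; values are made up.
from proxy.common.constants import DEFAULT_HTTP_ACCESS_LOG_FORMAT

context = {
    'client_ip': '127.0.0.1', 'client_port': 51102,
    'request_method': 'GET',
    'server_host': 'example.com', 'server_port': 80, 'request_path': '/',
    'response_code': 200, 'response_reason': 'OK',
    'response_bytes': 512, 'connection_time_ms': 14.2,
}
print(DEFAULT_HTTP_ACCESS_LOG_FORMAT.format(**context))
# 127.0.0.1:51102 - GET example.com:80/ - 200 OK - 512 bytes - 14.2 ms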

proxy/common/utils.py

Lines changed: 30 additions & 7 deletions
@@ -9,15 +9,17 @@
     :license: BSD, see LICENSE for more details.
 """
 import ssl
-import contextlib
+import socket
+import logging
 import functools
 import ipaddress
-import socket
+import contextlib
 
 from types import TracebackType
 from typing import Optional, Dict, Any, List, Tuple, Type, Callable
 
 from .constants import HTTP_1_1, COLON, WHITESPACE, CRLF, DEFAULT_TIMEOUT
+from .constants import DEFAULT_LOG_FILE, DEFAULT_LOG_FORMAT, DEFAULT_LOG_LEVEL
 
 
 def text_(s: Any, encoding: str = 'utf-8', errors: str = 'strict') -> Any:
@@ -89,14 +91,14 @@ def build_http_pkt(line: List[bytes],
                    headers: Optional[Dict[bytes, bytes]] = None,
                    body: Optional[bytes] = None) -> bytes:
     """Build and returns a HTTP request or response packet."""
-    req = WHITESPACE.join(line) + CRLF
+    pkt = WHITESPACE.join(line) + CRLF
     if headers is not None:
         for k in headers:
-            req += build_http_header(k, headers[k]) + CRLF
-    req += CRLF
+            pkt += build_http_header(k, headers[k]) + CRLF
+    pkt += CRLF
     if body:
-        req += body
-    return req
+        pkt += body
+    return pkt
 
 
 def build_websocket_handshake_request(
@@ -226,3 +228,24 @@ def get_available_port() -> int:
         sock.bind(('', 0))
         _, port = sock.getsockname()
     return int(port)
+
+
+def setup_logger(
+        log_file: Optional[str] = DEFAULT_LOG_FILE,
+        log_level: str = DEFAULT_LOG_LEVEL,
+        log_format: str = DEFAULT_LOG_FORMAT) -> None:
+    ll = getattr(
+        logging,
+        {'D': 'DEBUG',
+         'I': 'INFO',
+         'W': 'WARNING',
+         'E': 'ERROR',
+         'C': 'CRITICAL'}[log_level.upper()[0]])
+    if log_file:
+        logging.basicConfig(
+            filename=log_file,
+            filemode='a',
+            level=ll,
+            format=log_format)
+    else:
+        logging.basicConfig(level=ll, format=log_format)
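
Usage of the new helper is straightforward; only the first letter of log_level is inspected, so 'd', 'D' and 'debug' all resolve to logging.DEBUG, and with log_file=None it falls back to console logging. A minimal sketch:

# Minimal usage sketch of the new helper.
from proxy.common.utils import setup_logger

# Append to proxy.log at DEBUG level; only log_level[0] is inspected.
setup_logger(log_file='proxy.log', log_level='d',
             log_format='%(asctime)s %(levelname)s %(message)s')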

proxy/core/acceptor/acceptor.py

Lines changed: 3 additions & 0 deletions
@@ -27,6 +27,7 @@
 from ..event import EventQueue, eventNames
 from ...common.constants import DEFAULT_THREADLESS
 from ...common.flag import flags
+from ...common.utils import setup_logger
 
 logger = logging.getLogger(__name__)
 
@@ -133,6 +134,8 @@ def run_once(self) -> None:
             self.start_work(conn, addr)
 
     def run(self) -> None:
+        setup_logger(self.flags.log_file, self.flags.log_level,
+                     self.flags.log_format)
         self.selector = selectors.DefaultSelector()
         fileno = recv_handle(self.work_queue)
         self.work_queue.close()
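
Calling setup_logger at the top of run() matters because Acceptor is a separate process: under the spawn start method the child does not inherit the parent's logging configuration, so each worker must reconfigure itself. A standalone sketch of the pattern, using only stdlib and unrelated to proxy.py's own classes:

# Standalone demonstration: logging configured in the parent is not visible
# inside a spawned child unless the child configures logging again.
import logging
import multiprocessing


def worker() -> None:
    # Re-configure inside the child process, mirroring what Acceptor.run()
    # now does via setup_logger().
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(processName)s [%(levelname)s] %(message)s')
    logging.getLogger(__name__).debug('child logger configured')


if __name__ == '__main__':
    multiprocessing.set_start_method('spawn')
    logging.basicConfig(level=logging.DEBUG)  # parent-only configuration
    p = multiprocessing.Process(target=worker)
    p.start()
    p.join()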

proxy/core/acceptor/pool.py

Lines changed: 6 additions & 54 deletions
@@ -12,18 +12,18 @@
 import logging
 import multiprocessing
 import socket
-import threading
+
 from multiprocessing import connection
 from multiprocessing.reduction import send_handle
 from typing import List, Optional, Type
 
 from .acceptor import Acceptor
 from .work import Work
 
-from ..event import EventQueue, EventDispatcher
+from ..event import EventQueue
+
 from ...common.flag import flags
-from ...common.constants import DEFAULT_BACKLOG, DEFAULT_ENABLE_EVENTS
-from ...common.constants import DEFAULT_IPV6_HOSTNAME, DEFAULT_NUM_WORKERS, DEFAULT_PORT
+from ...common.constants import DEFAULT_BACKLOG, DEFAULT_IPV6_HOSTNAME, DEFAULT_NUM_WORKERS, DEFAULT_PORT
 
 logger = logging.getLogger(__name__)
 
@@ -37,14 +37,6 @@
     default=DEFAULT_BACKLOG,
     help='Default: 100. Maximum number of pending connections to proxy server')
 
-flags.add_argument(
-    '--enable-events',
-    action='store_true',
-    default=DEFAULT_ENABLE_EVENTS,
-    help='Default: False. Enables core to dispatch lifecycle events. '
-    'Plugins can be used to subscribe for core events.'
-)
-
 flags.add_argument(
     '--hostname',
     type=str,
@@ -79,31 +71,16 @@ class AcceptorPool:
         pool.shutdown()
 
     `work_klass` must implement `work.Work` class.
-
-    Optionally, AcceptorPool also initialize a global event queue.
-    It is a multiprocess safe queue which can be used to build pubsub patterns
-    for message sharing or signaling.
-
-    TODO(abhinavsingh): Decouple event queue setup & teardown into its own class.
     """
 
     def __init__(self, flags: argparse.Namespace,
-                 work_klass: Type[Work]) -> None:
+                 work_klass: Type[Work], event_queue: Optional[EventQueue] = None) -> None:
         self.flags = flags
         self.socket: Optional[socket.socket] = None
         self.acceptors: List[Acceptor] = []
         self.work_queues: List[connection.Connection] = []
         self.work_klass = work_klass
-
-        self.event_queue: Optional[EventQueue] = None
-        self.event_dispatcher: Optional[EventDispatcher] = None
-        self.event_dispatcher_thread: Optional[threading.Thread] = None
-        self.event_dispatcher_shutdown: Optional[threading.Event] = None
-        self.manager: Optional[multiprocessing.managers.SyncManager] = None
-
-        if self.flags.enable_events:
-            self.manager = multiprocessing.Manager()
-            self.event_queue = EventQueue(self.manager.Queue())
+        self.event_queue: Optional[EventQueue] = event_queue
 
     def listen(self) -> None:
         self.socket = socket.socket(self.flags.family, socket.SOCK_STREAM)
@@ -137,42 +114,17 @@ def start_workers(self) -> None:
             self.work_queues.append(work_queue[0])
         logger.info('Started %d workers' % self.flags.num_workers)
 
-    def start_event_dispatcher(self) -> None:
-        self.event_dispatcher_shutdown = threading.Event()
-        assert self.event_dispatcher_shutdown
-        assert self.event_queue
-        self.event_dispatcher = EventDispatcher(
-            shutdown=self.event_dispatcher_shutdown,
-            event_queue=self.event_queue
-        )
-        self.event_dispatcher_thread = threading.Thread(
-            target=self.event_dispatcher.run
-        )
-        self.event_dispatcher_thread.start()
-        logger.debug('Thread ID: %d', self.event_dispatcher_thread.ident)
-
     def shutdown(self) -> None:
         logger.info('Shutting down %d workers' % self.flags.num_workers)
         for acceptor in self.acceptors:
             acceptor.running.set()
-        if self.flags.enable_events:
-            assert self.event_dispatcher_shutdown
-            assert self.event_dispatcher_thread
-            self.event_dispatcher_shutdown.set()
-            self.event_dispatcher_thread.join()
-            logger.debug(
-                'Shutdown of global event dispatcher thread %d successful',
-                self.event_dispatcher_thread.ident)
         for acceptor in self.acceptors:
             acceptor.join()
         logger.debug('Acceptors shutdown')
 
     def setup(self) -> None:
         """Listen on port, setup workers and pass server socket to workers."""
         self.listen()
-        if self.flags.enable_events:
-            logger.info('Core Event enabled')
-            self.start_event_dispatcher()
         self.start_workers()
         # Send server socket to all acceptor processes.
         assert self.socket is not None
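
With event dispatcher setup and teardown moved out of AcceptorPool, the queue is now injected by the caller. A hedged wiring sketch of the new shape; the import paths follow the package layout shown in this commit, while `flags` (an argparse.Namespace) and `MyWork` (a Work subclass) are placeholders the caller must supply:

# Hedged wiring sketch: the event queue is owned by EventManager and merely
# passed into AcceptorPool; pass None to run without eventing.
from proxy.core.acceptor import AcceptorPool
from proxy.core.event import EventManager


def start(flags, MyWork):
    event_manager = EventManager()
    event_manager.start_event_dispatcher()
    pool = AcceptorPool(flags, MyWork,
                        event_queue=event_manager.event_queue)
    pool.setup()
    try:
        pass  # serve until interrupted
    finally:
        pool.shutdown()
        event_manager.stop_event_dispatcher()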

proxy/core/acceptor/threadless.py

Lines changed: 3 additions & 0 deletions
@@ -26,6 +26,7 @@
 from ..connection import TcpClientConnection
 from ..event import EventQueue, eventNames
 
+from ...common.utils import setup_logger
 from ...common.types import Readables, Writables
 from ...common.constants import DEFAULT_TIMEOUT
 
@@ -179,6 +180,8 @@ def run_once(self) -> None:
         self.cleanup_inactive()
 
     def run(self) -> None:
+        setup_logger(self.flags.log_file, self.flags.log_level,
+                     self.flags.log_format)
         try:
             self.selector = selectors.DefaultSelector()
             self.selector.register(self.client_queue, selectors.EVENT_READ)

proxy/core/connection/server.py

Lines changed: 6 additions & 3 deletions
@@ -10,9 +10,11 @@
 """
 import ssl
 import socket
+
 from typing import Optional, Union, Tuple
 
 from .connection import TcpConnection, tcpConnectionTypes, TcpConnectionUninitializedException
+
 from ...common.utils import new_socket_connection
 
 
@@ -23,6 +25,7 @@ def __init__(self, host: str, port: int):
         super().__init__(tcpConnectionTypes.SERVER)
         self._conn: Optional[Union[ssl.SSLSocket, socket.socket]] = None
         self.addr: Tuple[str, int] = (host, int(port))
+        self.closed = True
 
     @property
     def connection(self) -> Union[ssl.SSLSocket, socket.socket]:
@@ -31,9 +34,9 @@ def connection(self) -> Union[ssl.SSLSocket, socket.socket]:
         return self._conn
 
     def connect(self) -> None:
-        if self._conn is not None:
-            return
-        self._conn = new_socket_connection(self.addr)
+        if self._conn is None:
+            self._conn = new_socket_connection(self.addr)
+            self.closed = False
 
     def wrap(self, hostname: str, ca_file: Optional[str]) -> None:
         ctx = ssl.create_default_context(
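
The added `closed` flag and the now-idempotent connect() let callers, such as the proxy pool teardown mentioned in the commit message, check upstream connection state before reusing or closing it. A small behavioral sketch; it opens a real socket to example.com, so it needs outbound network access:

# Behavioral sketch of the lazy-connect change; requires outbound network.
from proxy.core.connection.server import TcpServerConnection

upstream = TcpServerConnection('example.com', 80)
assert upstream.closed            # no socket yet (late initialization)
upstream.connect()
assert not upstream.closed        # socket created, marked as open
upstream.connect()                # second call is a no-op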

proxy/core/event/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -12,11 +12,13 @@
 from .names import EventNames, eventNames
 from .dispatcher import EventDispatcher
 from .subscriber import EventSubscriber
+from .manager import EventManager
 
 __all__ = [
     'eventNames',
     'EventNames',
     'EventQueue',
     'EventDispatcher',
     'EventSubscriber',
+    'EventManager',
 ]

proxy/core/event/dispatcher.py

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ class EventDispatcher:
     module is not-recommended. Python native multiprocessing queue
     doesn't provide a fanout functionality which core dispatcher module
     implements so that several plugins can consume same published
-    event at a time.
+    event concurrently.
 
     When --enable-events is used, a multiprocessing.Queue is created and
     attached to global argparse. This queue can then be used for
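
The fanout behavior described in that docstring can be exercised with the APIs touched by this commit: two subscribers attached to the same queue each receive every published event. A small sketch built only from calls visible in the diffs above:

# Fanout sketch: both subscribers receive the same published event.
import time

from proxy.core.event import EventManager, EventSubscriber, eventNames

manager = EventManager()
manager.start_event_dispatcher()
assert manager.event_queue

first = EventSubscriber(manager.event_queue)
second = EventSubscriber(manager.event_queue)
first.subscribe(lambda payload: print('first received', payload))
second.subscribe(lambda payload: print('second received', payload))

manager.event_queue.publish(
    request_id='demo',
    event_name=eventNames.WORK_STARTED,
    event_payload={'hello': 'world'})

time.sleep(1)   # give the dispatcher thread a moment to fan the event out
first.unsubscribe()
second.unsubscribe()
manager.stop_event_dispatcher()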
