Ability to execute in parallel (GDB) #29

Merged 1 commit on Sep 16, 2024
103 changes: 75 additions & 28 deletions debuggers/gdb/gdb_mi_driver.py
@@ -1,7 +1,7 @@
import os
import json

from debuggers.gdb.idd_gdb_controller import IDDGdbController
from debuggers.gdb.idd_gdb_controller import create_IDDGdbController, terminate_all_IDDGdbController
from driver import Driver

from debuggers.gdb.utils import parse_gdb_line
@@ -17,8 +17,8 @@ class GDBMiDebugger(Driver):
gdb_instances = None

def __init__(self, base_args, base_script_file_path, regression_args, regression_script_file_path):
self.base_gdb_instance = IDDGdbController(base_script_file_path)
self.regressed_gdb_instance = IDDGdbController(regression_script_file_path)
self.base_gdb_instance = create_IDDGdbController(base_script_file_path)
self.regressed_gdb_instance = create_IDDGdbController(regression_script_file_path)

self.gdb_instances = { 'base': self.base_gdb_instance, 'regressed': self.regressed_gdb_instance }

@@ -29,57 +29,100 @@ def __init__(self, base_args, base_script_file_path, regression_args, regression_script_file_path):
self.run_parallel_raw_command("source " + os.path.join(dirname, "gdb_commands.py"))

def run_parallel_command(self, command):
base_response = self.run_single_command(command, "base")
regressed_response = self.run_single_command(command, "regressed")
# start both execution in parallel
self.base_gdb_instance.send(((" {command}\n".format(command = command),), {"timeout_sec": 60}))
self.regressed_gdb_instance.send(((" {command}\n".format(command = command),), {"timeout_sec": 60}))

# wait till base is done
raw_result = self.base_gdb_instance.recv()

# make sure all output is flushed
# time.sleep(.005)
self.base_gdb_instance.send((("",), {"timeout_sec": 60}))
raw_result += self.base_gdb_instance.recv()
Comment on lines +39 to +42

Contributor Author:

Do we need this? I observed that we do not have this in the run_single_special_command function.

Collaborator:

I think this is a remnant of the previous effort to capture the GDB response, where we listened for stdout output. I think it is no longer needed since we are using the other approach based on the IoManager.
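
A minimal sketch, not part of the diff, of what the two sends per instance amount to once the worker forwards them to IDDGdbController.write (assuming pygdbmi-style read-until-no-more-output semantics; the command string here is hypothetical):

    # inside the worker process, per command (hypothetical expansion)
    raw_result = gdb.write(" run\n", timeout_sec=60)   # first read: returns once gdb goes quiet
    raw_result += gdb.write("", timeout_sec=60)        # "flush": send an empty line and drain any late output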


# parse output (base)
base_response = self.parse_command_output(raw_result)

# wait till regression is done
raw_result = self.regressed_gdb_instance.recv()

# make sure all output is flushed
# time.sleep(.005)
self.regressed_gdb_instance.send((("",), {"timeout_sec": 60}))
raw_result += self.regressed_gdb_instance.recv()

# parse output regression
regressed_response = self.parse_command_output(raw_result)

return { "base": base_response, "regressed": regressed_response }

def parse_command_output(self, raw_result):
response = []
for item in raw_result:
if item['type'] == 'console':
input_string = str(item['payload'])
processed_output = parse_gdb_line(input_string)
response.append(processed_output)
return response

def run_single_command(self, command, version):
global base_response
global regressed_response

result = []
raw_result = self.gdb_instances[version].write(" {command}\n".format(command = command), 2)
self.gdb_instances[version].send(((" {command}\n".format(command = command),), {"timeout_sec": 60}))
raw_result = self.gdb_instances[version].recv()

# make sure all output is flushed
# time.sleep(.005)
flushed_results = self.gdb_instances[version].write("".format(command = command))
raw_result = raw_result + flushed_results

for item in raw_result:
if item['type'] == 'console':
input_string = str(item['payload'])
processed_output = parse_gdb_line(input_string)

result.append(processed_output)
self.gdb_instances[version].send((("",), {"timeout_sec": 60}))
raw_result += self.gdb_instances[version].recv()

return result
return self.parse_command_output(raw_result)

def run_single_special_command(self, command, version):
global base_response
global regressed_response

raw_result = self.gdb_instances[version].write(" {command}\n".format(command = command), 2)
flushed_results = self.gdb_instances[version].write("".format(command = command))
raw_result = raw_result + flushed_results
self.gdb_instances[version].send(((" {command}\n".format(command = command),), {"timeout_sec": 60}))
raw_result = self.gdb_instances[version].recv()

# flush output
self.gdb_instances[version].send((("",), {"timeout_sec": 60}))
raw_result += self.gdb_instances[version].recv()

return self.parse_special_command_output(raw_result)

def parse_special_command_output(self, raw_result):
for item in raw_result:
if item['type'] == 'console':
input_string = str(item['payload'])
processed_output = parse_gdb_line(input_string)
parsed_dict = json.loads(processed_output)
try:
parsed_dict = json.loads(processed_output)
except json.JSONDecodeError:
parsed_dict = processed_output
Comment on lines +101 to +104

Contributor Author:

This change is related to the JSONDecodeError we were talking about. Without this change, I get the JSON error.

Collaborator:

Maybe there are certain cases where the processed_output is not in the correct format and json.loads fails to parse it.
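
A tiny hypothetical example of the failure mode: pstate's console output is expected to be JSON, but plain console lines are not, so json.loads raises (the breakpoint message below is made up):

    import json

    for processed_output in ['{"stack_frame": []}', 'Breakpoint 1 at 0x1149: file main.c, line 5.']:
        try:
            parsed_dict = json.loads(processed_output)
        except json.JSONDecodeError:
            parsed_dict = processed_output   # fall back to the raw console string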


if parsed_dict:
return parsed_dict

def get_state(self, version=None):
if version is None:
base_state = self.run_single_special_command('pstate', 'base')
regression_state = self.run_single_special_command('pstate', 'regressed')
if version is not None:
return self.run_single_special_command("pstate", version)

# get base and regression state
self.base_gdb_instance.send(((" {command}\n".format(command = "pstate"),), {"timeout_sec": 60}))
self.regressed_gdb_instance.send(((" {command}\n".format(command = "pstate"),), {"timeout_sec": 60}))

return { "base" : base_state, "regressed" : regression_state }
# wait till base is done
raw_result = self.base_gdb_instance.recv()
base_state = self.parse_special_command_output(raw_result)

# wait till regression is done
raw_result = self.regressed_gdb_instance.recv()
regression_state = self.parse_special_command_output(raw_result)

return self.run_single_special_command("pstate", version)
return { "base" : base_state, "regressed" : regression_state }

def get_current_stack_frames(self, state):
base_stack_frame = state['base']['stack_frame']
@@ -119,9 +162,13 @@ def run_parallel_raw_command(self, command):

def run_single_raw_command(self, command, version):
result = []
raw_result = self.gdb_instances[version].write("{command}\n".format(command = command))
self.gdb_instances[version].send((("{command}\n".format(command = command),), {"timeout_sec": 60}))
raw_result = self.gdb_instances[version].recv()

for item in raw_result:
result.append(str(item))

return result

def terminate(self):
terminate_all_IDDGdbController()
42 changes: 41 additions & 1 deletion debuggers/gdb/idd_gdb_controller.py
@@ -8,6 +8,10 @@
DEFAULT_TIME_TO_CHECK_FOR_ADDITIONAL_OUTPUT_SEC,
)

from multiprocessing import Process, Pipe

processes = []

DEFAULT_GDB_LAUNCH_COMMAND = ["gdb", "--nx", "--quiet", "--interpreter=mi3"]
logger = logging.getLogger(__name__)

@@ -48,4 +52,40 @@ def spawn_new_gdb_subprocess(self) -> int:
self.gdb_process.stderr,
self.time_to_check_for_additional_output_sec,
)
return self.gdb_process.pid
return self.gdb_process.pid


class IDDParallelTerminate:
pass


class IDDParallelGdbController:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs

def run(self, pipe):
gdb = IDDGdbController(*self.args, **self.kwargs)
while True:
args, kwargs = pipe.recv()
if isinstance(args, IDDParallelTerminate) or isinstance(kwargs, IDDParallelTerminate):
return
res = gdb.write(*args, **kwargs)
pipe.send(res)


def create_IDDGdbController(*args, **kwargs):
global processes

gdb = IDDParallelGdbController(*args, **kwargs)
parent_conn, child_conn = Pipe()
process = Process(target=gdb.run, args=(child_conn,))
processes.append((process, parent_conn))
process.start()
return parent_conn

def terminate_all_IDDGdbController():
for _, pipe in processes:
pipe.send((IDDParallelTerminate(), IDDParallelTerminate()))
for process, _ in processes:
process.join()
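
Not part of the diff: a usage sketch of the new worker-process protocol, assuming the (args, kwargs) tuples mirror IDDGdbController.write's signature (the script path is hypothetical):

    from debuggers.gdb.idd_gdb_controller import create_IDDGdbController, terminate_all_IDDGdbController

    # Spawns a worker process that owns its own IDDGdbController and
    # returns the parent end of a multiprocessing Pipe.
    base = create_IDDGdbController("base_script.gdb")

    # Each message is an (args, kwargs) tuple the worker forwards to gdb.write(*args, **kwargs).
    base.send(((" bt\n",), {"timeout_sec": 60}))
    records = base.recv()   # parsed GDB/MI records returned by the worker

    # IDDParallelTerminate sentinels make each worker's run() loop return, then the processes are joined.
    terminate_all_IDDGdbController()
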
3 changes: 3 additions & 0 deletions debuggers/lldb/lldb_driver.py
@@ -162,3 +162,6 @@ def get_current_calls(self):
regression_calls = get_call_instructions(regression_target)

return { "base" : base_calls, "regressed" : regression_calls }

def terminate(self):
pass
3 changes: 3 additions & 0 deletions driver.py
@@ -7,3 +7,6 @@ def run_single_command(self, command, target): raise NotImplementedError

@abstractmethod
def run_parallel_command(self, command): raise NotImplementedError

@abstractmethod
def terminate(self): raise NotImplementedError
4 changes: 4 additions & 0 deletions idd.py
@@ -272,6 +272,10 @@ def compose(self) -> ComposeResult:
async def execute_debugger_command(self, event: Input.Changed) -> None:
# Updating the UI to show the reasons why validation failed
if event.control.id == 'parallel-command-bar':
if self.parallel_command_bar.value == "quit" or \
self.parallel_command_bar.value == "exit":
Debugger.terminate()
exit(0)
if self.parallel_command_bar.value != "":
result = Debugger.run_parallel_command(self.parallel_command_bar.value)
