Ability to execute in parallel (GDB) #29
@@ -1,7 +1,7 @@
 import os
 import json
 
-from debuggers.gdb.idd_gdb_controller import IDDGdbController
+from debuggers.gdb.idd_gdb_controller import create_IDDGdbController, terminate_all_IDDGdbController
 from driver import Driver
 
 from debuggers.gdb.utils import parse_gdb_line
@@ -17,8 +17,8 @@ class GDBMiDebugger(Driver):
     gdb_instances = None
 
     def __init__(self, base_args, base_script_file_path, regression_args, regression_script_file_path):
-        self.base_gdb_instance = IDDGdbController(base_script_file_path)
-        self.regressed_gdb_instance = IDDGdbController(regression_script_file_path)
+        self.base_gdb_instance = create_IDDGdbController(base_script_file_path)
+        self.regressed_gdb_instance = create_IDDGdbController(regression_script_file_path)
 
         self.gdb_instances = { 'base': self.base_gdb_instance, 'regressed': self.regressed_gdb_instance }
 
@@ -29,57 +29,100 @@ def __init__(self, base_args, base_script_file_path, regression_args, regression
         self.run_parallel_raw_command("source " + os.path.join(dirname, "gdb_commands.py"))
 
     def run_parallel_command(self, command):
-        base_response = self.run_single_command(command, "base")
-        regressed_response = self.run_single_command(command, "regressed")
+        # start both execution in parallel
+        self.base_gdb_instance.send(((" {command}\n".format(command = command),), {"timeout_sec": 60}))
+        self.regressed_gdb_instance.send(((" {command}\n".format(command = command),), {"timeout_sec": 60}))
+
+        # wait till base is done
+        raw_result = self.base_gdb_instance.recv()
+
+        # make sure all output is flushed
+        # time.sleep(.005)
+        self.base_gdb_instance.send((("",), {"timeout_sec": 60}))
+        raw_result += self.base_gdb_instance.recv()
+
+        # parse output (base)
+        base_response = self.parse_command_output(raw_result)
+
+        # wait till regression is done
+        raw_result = self.regressed_gdb_instance.recv()
+
+        # make sure all output is flushed
+        # time.sleep(.005)
+        self.regressed_gdb_instance.send((("",), {"timeout_sec": 60}))
+        raw_result += self.regressed_gdb_instance.recv()
+
+        # parse output regression
+        regressed_response = self.parse_command_output(raw_result)
 
         return { "base": base_response, "regressed": regressed_response }
 
+    def parse_command_output(self, raw_result):
+        response = []
+        for item in raw_result:
+            if item['type'] == 'console':
+                input_string = str(item['payload'])
+                processed_output = parse_gdb_line(input_string)
+                response.append(processed_output)
+        return response
+
     def run_single_command(self, command, version):
         global base_response
         global regressed_response
 
-        result = []
-        raw_result = self.gdb_instances[version].write(" {command}\n".format(command = command), 2)
+        self.gdb_instances[version].send(((" {command}\n".format(command = command),), {"timeout_sec": 60}))
+        raw_result = self.gdb_instances[version].recv()
 
         # make sure all output is flushed
         # time.sleep(.005)
-        flushed_results = self.gdb_instances[version].write("".format(command = command))
-        raw_result = raw_result + flushed_results
-
-        for item in raw_result:
-            if item['type'] == 'console':
-                input_string = str(item['payload'])
-                processed_output = parse_gdb_line(input_string)
-
-                result.append(processed_output)
-
-        return result
+        self.gdb_instances[version].send((("",), {"timeout_sec": 60}))
+        raw_result += self.gdb_instances[version].recv()
+
+        return self.parse_command_output(raw_result)
 
     def run_single_special_command(self, command, version):
         global base_response
         global regressed_response
 
-        raw_result = self.gdb_instances[version].write(" {command}\n".format(command = command), 2)
-        flushed_results = self.gdb_instances[version].write("".format(command = command))
-        raw_result = raw_result + flushed_results
+        self.gdb_instances[version].send(((" {command}\n".format(command = command),), {"timeout_sec": 60}))
+        raw_result = self.gdb_instances[version].recv()
+
+        # flush output
+        self.gdb_instances[version].send((("",), {"timeout_sec": 60}))
+        raw_result += self.gdb_instances[version].recv()
 
         return self.parse_special_command_output(raw_result)
 
     def parse_special_command_output(self, raw_result):
         for item in raw_result:
             if item['type'] == 'console':
                 input_string = str(item['payload'])
                 processed_output = parse_gdb_line(input_string)
-                parsed_dict = json.loads(processed_output)
+                try:
+                    parsed_dict = json.loads(processed_output)
+                except json.JSONDecodeError:
+                    parsed_dict = processed_output
Comment on lines +101 to +104:

- This change is related to the …
- Maybe there are certain cases where the processed_output is not in the correct format and json.loads fails to parse it.
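The concern in the second comment is what the new try/except guards against: GDB's console stream mixes the JSON emitted by the custom pstate command with ordinary GDB messages that json.loads cannot parse. A minimal sketch of that failure mode, with hypothetical payload strings (the exact pstate output format is an assumption):

import json

# Hypothetical console payloads as they might look after parse_gdb_line:
payloads = [
    '{"stack_frame": [{"level": "0", "func": "main"}]}',  # assumed pstate-style JSON
    'No symbol "pstate" in current context.',             # ordinary GDB text, not JSON
]

for processed_output in payloads:
    try:
        parsed_dict = json.loads(processed_output)
    except json.JSONDecodeError:
        # fall back to the raw string instead of raising, mirroring the change above
        parsed_dict = processed_output
    print(type(parsed_dict).__name__, parsed_dict)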
 
                 if parsed_dict:
                     return parsed_dict
 
     def get_state(self, version=None):
-        if version is None:
-            base_state = self.run_single_special_command('pstate', 'base')
-            regression_state = self.run_single_special_command('pstate', 'regressed')
-
-            return { "base" : base_state, "regressed" : regression_state }
-
-        return self.run_single_special_command("pstate", version)
+        if version is not None:
+            return self.run_single_special_command("pstate", version)
+
+        # get base and regression state
+        self.base_gdb_instance.send(((" {command}\n".format(command = "pstate"),), {"timeout_sec": 60}))
+        self.regressed_gdb_instance.send(((" {command}\n".format(command = "pstate"),), {"timeout_sec": 60}))
+
+        # wait till base is done
+        raw_result = self.base_gdb_instance.recv()
+        base_state = self.parse_special_command_output(raw_result)
+
+        # wait till regression is done
+        raw_result = self.regressed_gdb_instance.recv()
+        regression_state = self.parse_special_command_output(raw_result)
+
+        return { "base" : base_state, "regressed" : regression_state }
 
     def get_current_stack_frames(self, state):
         base_stack_frame = state['base']['stack_frame']
@@ -119,9 +162,13 @@ def run_parallel_raw_command(self, command):
 
     def run_single_raw_command(self, command, version):
         result = []
-        raw_result = self.gdb_instances[version].write("{command}\n".format(command = command))
+        self.gdb_instances[version].send((("{command}\n".format(command = command),), {"timeout_sec": 60}))
+        raw_result = self.gdb_instances[version].recv()
 
         for item in raw_result:
             result.append(str(item))
 
         return result
+
+    def terminate(self):
+        terminate_all_IDDGdbController()
Review comments:

- Do we need this? I observed that we do not have this in the run_single_special_command function.
- I think this is a remnant of the previous effort to capture the GDB response, where we listened for stdout output. I think it is no longer needed since we are using the other approach based on the IoManager.
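For context on the send()/recv() calls introduced throughout this diff: create_IDDGdbController and terminate_all_IDDGdbController are not shown here, so the sketch below is only a guess at the underlying pattern, namely one worker process per GDB instance driven over a multiprocessing pipe so that both debuggers can execute a command at the same time. All names, the echo-only worker, and the shutdown convention are invented for illustration.

import multiprocessing as mp

_workers = []

def _gdb_worker(conn, script_file_path):
    # In the real project this would wrap an actual GDB/MI controller configured
    # from script_file_path; here the worker only echoes commands back so the
    # sketch stays self-contained (timeout_sec is accepted but ignored).
    while True:
        args, kwargs = conn.recv()            # e.g. ((" bt\n",), {"timeout_sec": 60})
        if args and args[0] is None:          # invented shutdown signal
            break
        conn.send([{"type": "console", "payload": "ran: {0!r}".format(args[0])}])

def create_gdb_worker(script_file_path):
    parent_conn, child_conn = mp.Pipe()
    proc = mp.Process(target=_gdb_worker, args=(child_conn, script_file_path), daemon=True)
    proc.start()
    _workers.append((proc, parent_conn))
    return parent_conn

def terminate_all_gdb_workers():
    for proc, conn in _workers:
        conn.send(((None,), {}))
        proc.join()

if __name__ == "__main__":
    base = create_gdb_worker("base_script.gdb")
    regressed = create_gdb_worker("regression_script.gdb")

    # Both commands are in flight before either result is read; this is the
    # parallel behaviour that run_parallel_command relies on.
    base.send((("bt\n",), {"timeout_sec": 60}))
    regressed.send((("bt\n",), {"timeout_sec": 60}))
    print(base.recv())
    print(regressed.recv())

    terminate_all_gdb_workers()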