2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -33,7 +33,7 @@ jobs:
# flake8 should run on each Python version that we target,
# because the errors and warnings can differ due to language
# changes, and we want to catch them all.
python_version: ['3.5', '3.7']
python_version: ['3.6', '3.7']
steps:
- uses: actions/checkout@v1
- uses: actions/setup-python@v1
4 changes: 1 addition & 3 deletions pyproject.toml
@@ -1,5 +1,3 @@
[tool.black]
line-length = 80
# TODO(@wchargin): Drop `py35` here once we drop support for Python 3.5
# and aren't affected by <https://bugs.python.org/issue9232>.
target-version = ["py27", "py35", "py36", "py37", "py38"]
target-version = ["py36", "py37", "py38"]
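For context (this note is not part of the changed files): Black's `target-version` setting declares which interpreters the formatted output must still parse under, so narrowing it to `py36`/`py37`/`py38` lets the formatter emit syntax that Python 2.7 and 3.5 reject. The removed TODO points at <https://bugs.python.org/issue9232>: before Python 3.6, a trailing comma after `*args` or `**kwargs` in a function signature is a SyntaxError. A minimal sketch, with invented names:

```python
# Parses on Python 3.6+ only; a SyntaxError on 3.5 and earlier (bpo-9232).
def get_purge_message(
    most_recent_step,
    most_recent_wall_time,
    *expired_counts,  # the trailing comma after *args needs Python >= 3.6
):
    return "Purged {} expired events".format(sum(expired_counts))


print(get_purge_message(7, 1234.5, 3, 0, 2))
```

With `py27` and `py35` out of the list, Black is free to add trailing commas like this, which appears to be what several hunks below rely on.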
5 changes: 4 additions & 1 deletion tensorboard/backend/application.py
@@ -390,7 +390,10 @@ def _serve_plugin_entry(self, request):
"""
).format(name=name, script_content=script_content)
return http_util.Respond(
request, html, "text/html", csp_scripts_sha256s=[script_sha],
request,
html,
"text/html",
csp_scripts_sha256s=[script_sha],
)

@wrappers.Request.application
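Most of the remaining hunks follow the same pattern as the `http_util.Respond` call above: where the old formatting packed the arguments onto a single indented line that already ended in a trailing comma, the new formatting keeps that comma and gives each argument (or dict entry) its own line. This matches how newer Black releases treat a pre-existing "magic" trailing comma, though the exact Black version being adopted is not visible in this diff. A rough before/after sketch with stand-in names (not TensorBoard APIs):

```python
# Stand-ins so the snippet runs on its own.
def handler(request):
    return None


def make_plugin(name, routes):
    return {"name": name, "routes": routes}


# Before: the collapsed call still carries trailing commas inside it.
plugin = make_plugin(name="baz", routes={"/esmodule": handler,},)

# After: the trailing commas are preserved and each element is exploded
# onto its own line, as in the reformatted test fixtures below.
plugin = make_plugin(
    name="baz",
    routes={
        "/esmodule": handler,
    },
)
```

Nothing changes functionally in these hunks; only the layout does.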
32 changes: 24 additions & 8 deletions tensorboard/backend/application_test.py
@@ -190,7 +190,8 @@ def app(request):
server = werkzeug_test.Client(app, wrappers.BaseResponse)
response = server.get("/")
self.assertEqual(
response.get_data(), b"Unauthenticated: who are you?",
response.get_data(),
b"Unauthenticated: who are you?",
)
self.assertEqual(response.status_code, 401)
self.assertStartsWith(
@@ -224,11 +225,15 @@ def setUp(self):
),
FakePlugin(
plugin_name="baz",
routes_mapping={"/esmodule": lambda req: None,},
routes_mapping={
"/esmodule": lambda req: None,
},
es_module_path_value="/esmodule",
),
FakePlugin(
plugin_name="qux", is_active_value=False, is_ng_component=True,
plugin_name="qux",
is_active_value=False,
is_ng_component=True,
),
]
app = application.TensorBoardWSGI(plugins)
@@ -289,7 +294,9 @@ def testPluginsListing(self):
},
"qux": {
"enabled": False,
"loading_mechanism": {"type": "NG_COMPONENT",},
"loading_mechanism": {
"type": "NG_COMPONENT",
},
"tab_name": "qux",
"remove_dom": False,
"disable_reload": False,
@@ -305,7 +312,9 @@ def testPluginsListingWithDataProviderListActivePlugins(self):
plugins = [
FakePlugin(plugin_name="foo", is_active_value=False),
FakePlugin(
plugin_name="bar", is_active_value=False, data_plugin_names=(),
plugin_name="bar",
is_active_value=False,
data_plugin_names=(),
),
FakePlugin(plugin_name="baz", is_active_value=False),
FakePlugin(
@@ -495,7 +504,9 @@ def setUp(self):
),
FakePlugin(
plugin_name="baz",
routes_mapping={"/esmodule": lambda req: None,},
routes_mapping={
"/esmodule": lambda req: None,
},
es_module_path_value="/esmodule",
),
]
@@ -695,7 +706,9 @@ def setUp(self):
),
FakePluginLoader(
plugin_name="whoami",
routes_mapping={"/eid": self._eid_handler,},
routes_mapping={
"/eid": self._eid_handler,
},
),
],
data_provider=FakeDataProvider(),
@@ -761,7 +774,10 @@ def _wildcard_special_handler(self, request):
def testPluginsAdded(self):
# The routes are prefixed with /data/plugin/[plugin name].
expected_routes = frozenset(
("/data/plugin/foo/foo_route", "/data/plugin/bar/bar_route",)
(
"/data/plugin/foo/foo_route",
"/data/plugin/bar/bar_route",
)
)
self.assertLessEqual(expected_routes, frozenset(self.app.exact_routes))

17 changes: 12 additions & 5 deletions tensorboard/backend/event_processing/data_provider.py
@@ -129,7 +129,7 @@ def read_scalars(
experiment_id,
plugin_name,
downsample=None,
run_tag_filter=None
run_tag_filter=None,
):
self._validate_context(ctx)
self._validate_experiment_id(experiment_id)
@@ -156,7 +156,7 @@ def read_tensors(
experiment_id,
plugin_name,
downsample=None,
run_tag_filter=None
run_tag_filter=None,
):
self._validate_context(ctx)
self._validate_experiment_id(experiment_id)
@@ -313,7 +313,7 @@ def read_blob_sequences(
experiment_id,
plugin_name,
downsample=None,
run_tag_filter=None
run_tag_filter=None,
):
self._validate_context(ctx)
self._validate_experiment_id(experiment_id)
@@ -445,13 +445,20 @@ def _convert_blob_sequence_event(experiment_id, plugin_name, run, tag, event):
values = tuple(
provider.BlobReference(
_encode_blob_key(
experiment_id, plugin_name, run, tag, event.step, idx,
experiment_id,
plugin_name,
run,
tag,
event.step,
idx,
)
)
for idx in range(num_blobs)
)
return provider.BlobSequenceDatum(
wall_time=event.wall_time, step=event.step, values=values,
wall_time=event.wall_time,
step=event.step,
values=values,
)


3 changes: 2 additions & 1 deletion tensorboard/backend/event_processing/data_provider_test.py
@@ -377,7 +377,8 @@ def test_read_tensors(self):
)

run_tag_filter = base_provider.RunTagFilter(
runs=["lebesgue"], tags=["uniform", "bimodal"],
runs=["lebesgue"],
tags=["uniform", "bimodal"],
)
result = provider.read_tensors(
self.ctx,
2 changes: 1 addition & 1 deletion tensorboard/backend/event_processing/event_accumulator.py
@@ -778,7 +778,7 @@ def _ExpiredPerTag(value):
self.most_recent_wall_time,
event.step,
event.wall_time,
*expired_per_type
*expired_per_type,
)
logger.warning(purge_msg)

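The `event_accumulator.py` change just above is the one hunk where the new comma is more than cosmetic: it follows an unpacked `*expired_per_type` argument in a call, and Python 2.7 rejects a trailing comma in that position (Python 3.5+ accepts it via PEP 448's generalized unpacking). Presumably Black only adds it now because `py27` was dropped from `target-version` earlier in this diff. A tiny illustration with invented names:

```python
# Accepted on Python 3.5 and later; a SyntaxError on Python 2.7.
def report(*parts):
    return " ".join(str(part) for part in parts)


expired = [4, 0, 1]
print(report("expired per type:", *expired,))  # trailing comma after *expired
```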
31 changes: 22 additions & 9 deletions tensorboard/backend/event_processing/event_accumulator_test.py
@@ -785,17 +785,21 @@ def testTFSummaryImage(self):
accumulator.Reload()

tags = [
u"1/images/image",
u"2/images/image/0",
u"2/images/image/1",
u"3/images/image/0",
u"3/images/image/1",
u"3/images/image/2",
"1/images/image",
"2/images/image/0",
"2/images/image/1",
"3/images/image/0",
"3/images/image/1",
"3/images/image/2",
]

self.assertTagsEqual(
accumulator.Tags(),
{ea.IMAGES: tags, ea.GRAPH: True, ea.META_GRAPH: False,},
{
ea.IMAGES: tags,
ea.GRAPH: True,
ea.META_GRAPH: False,
},
)

def testTFSummaryTensor(self):
@@ -819,7 +823,10 @@ def testTFSummaryTensor(self):
accumulator.Reload()

self.assertTagsEqual(
accumulator.Tags(), {ea.TENSORS: ["scalar", "vector", "string"],}
accumulator.Tags(),
{
ea.TENSORS: ["scalar", "vector", "string"],
},
)

scalar_proto = accumulator.Tensors("scalar")[0].tensor_proto
@@ -949,7 +956,13 @@ def testGraphFromMetaGraphBecomesAvailable(self):
# Verify that we can load those events properly
acc = ea.EventAccumulator(directory)
acc.Reload()
self.assertTagsEqual(acc.Tags(), {ea.GRAPH: True, ea.META_GRAPH: True,})
self.assertTagsEqual(
acc.Tags(),
{
ea.GRAPH: True,
ea.META_GRAPH: True,
},
)

expected_graph_def = graph_pb2.GraphDef.FromString(
graph.as_graph_def(add_shapes=True).SerializeToString()
@@ -139,10 +139,12 @@ def assertEventWallTimes(self, load_result, event_wall_times_in_order):
transposed = list(zip(*load_result))
wall_times, events = transposed if transposed else ([], [])
self.assertEqual(
list(wall_times), event_wall_times_in_order,
list(wall_times),
event_wall_times_in_order,
)
self.assertEqual(
[event.wall_time for event in events], event_wall_times_in_order,
[event.wall_time for event in events],
event_wall_times_in_order,
)


@@ -52,7 +52,10 @@ def __init__(self, path):
self._path = path
self.reload_called = False
self._plugin_to_tag_to_content = {
"baz_plugin": {"foo": "foo_content", "bar": "bar_content",}
"baz_plugin": {
"foo": "foo_content",
"bar": "bar_content",
}
}

def Tags(self):