diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 76da1741f1..fe241662aa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -33,7 +33,7 @@ jobs: # flake8 should run on each Python version that we target, # because the errors and warnings can differ due to language # changes, and we want to catch them all. - python_version: ['3.5', '3.7'] + python_version: ['3.6', '3.7'] steps: - uses: actions/checkout@v1 - uses: actions/setup-python@v1 diff --git a/pyproject.toml b/pyproject.toml index cc5891c6fb..74ff519048 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,3 @@ [tool.black] line-length = 80 -# TODO(@wchargin): Drop `py35` here once we drop support for Python 3.5 -# and aren't affected by . -target-version = ["py27", "py35", "py36", "py37", "py38"] +target-version = ["py36", "py37", "py38"] diff --git a/tensorboard/backend/application.py b/tensorboard/backend/application.py index ba8e5e888a..da4f9fc230 100644 --- a/tensorboard/backend/application.py +++ b/tensorboard/backend/application.py @@ -390,7 +390,10 @@ def _serve_plugin_entry(self, request): """ ).format(name=name, script_content=script_content) return http_util.Respond( - request, html, "text/html", csp_scripts_sha256s=[script_sha], + request, + html, + "text/html", + csp_scripts_sha256s=[script_sha], ) @wrappers.Request.application diff --git a/tensorboard/backend/application_test.py b/tensorboard/backend/application_test.py index 2b2500ecdc..92d445a331 100644 --- a/tensorboard/backend/application_test.py +++ b/tensorboard/backend/application_test.py @@ -190,7 +190,8 @@ def app(request): server = werkzeug_test.Client(app, wrappers.BaseResponse) response = server.get("/") self.assertEqual( - response.get_data(), b"Unauthenticated: who are you?", + response.get_data(), + b"Unauthenticated: who are you?", ) self.assertEqual(response.status_code, 401) self.assertStartsWith( @@ -224,11 +225,15 @@ def setUp(self): ), FakePlugin( plugin_name="baz", - routes_mapping={"/esmodule": lambda req: None,}, + routes_mapping={ + "/esmodule": lambda req: None, + }, es_module_path_value="/esmodule", ), FakePlugin( - plugin_name="qux", is_active_value=False, is_ng_component=True, + plugin_name="qux", + is_active_value=False, + is_ng_component=True, ), ] app = application.TensorBoardWSGI(plugins) @@ -289,7 +294,9 @@ def testPluginsListing(self): }, "qux": { "enabled": False, - "loading_mechanism": {"type": "NG_COMPONENT",}, + "loading_mechanism": { + "type": "NG_COMPONENT", + }, "tab_name": "qux", "remove_dom": False, "disable_reload": False, @@ -305,7 +312,9 @@ def testPluginsListingWithDataProviderListActivePlugins(self): plugins = [ FakePlugin(plugin_name="foo", is_active_value=False), FakePlugin( - plugin_name="bar", is_active_value=False, data_plugin_names=(), + plugin_name="bar", + is_active_value=False, + data_plugin_names=(), ), FakePlugin(plugin_name="baz", is_active_value=False), FakePlugin( @@ -495,7 +504,9 @@ def setUp(self): ), FakePlugin( plugin_name="baz", - routes_mapping={"/esmodule": lambda req: None,}, + routes_mapping={ + "/esmodule": lambda req: None, + }, es_module_path_value="/esmodule", ), ] @@ -695,7 +706,9 @@ def setUp(self): ), FakePluginLoader( plugin_name="whoami", - routes_mapping={"/eid": self._eid_handler,}, + routes_mapping={ + "/eid": self._eid_handler, + }, ), ], data_provider=FakeDataProvider(), @@ -761,7 +774,10 @@ def _wildcard_special_handler(self, request): def testPluginsAdded(self): # The routes are prefixed with /data/plugin/[plugin name]. 
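        # (Illustrative sketch only, not taken from this change; "handler"
        # is a hypothetical callable. A plugin constructed as
        #     FakePlugin(plugin_name="foo", routes_mapping={"/foo_route": handler})
        # is served at the exact route "/data/plugin/foo/foo_route",
        # which is what expected_routes asserts below.)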
expected_routes = frozenset( - ("/data/plugin/foo/foo_route", "/data/plugin/bar/bar_route",) + ( + "/data/plugin/foo/foo_route", + "/data/plugin/bar/bar_route", + ) ) self.assertLessEqual(expected_routes, frozenset(self.app.exact_routes)) diff --git a/tensorboard/backend/event_processing/data_provider.py b/tensorboard/backend/event_processing/data_provider.py index 473e04e01d..c046a28c3e 100644 --- a/tensorboard/backend/event_processing/data_provider.py +++ b/tensorboard/backend/event_processing/data_provider.py @@ -129,7 +129,7 @@ def read_scalars( experiment_id, plugin_name, downsample=None, - run_tag_filter=None + run_tag_filter=None, ): self._validate_context(ctx) self._validate_experiment_id(experiment_id) @@ -156,7 +156,7 @@ def read_tensors( experiment_id, plugin_name, downsample=None, - run_tag_filter=None + run_tag_filter=None, ): self._validate_context(ctx) self._validate_experiment_id(experiment_id) @@ -313,7 +313,7 @@ def read_blob_sequences( experiment_id, plugin_name, downsample=None, - run_tag_filter=None + run_tag_filter=None, ): self._validate_context(ctx) self._validate_experiment_id(experiment_id) @@ -445,13 +445,20 @@ def _convert_blob_sequence_event(experiment_id, plugin_name, run, tag, event): values = tuple( provider.BlobReference( _encode_blob_key( - experiment_id, plugin_name, run, tag, event.step, idx, + experiment_id, + plugin_name, + run, + tag, + event.step, + idx, ) ) for idx in range(num_blobs) ) return provider.BlobSequenceDatum( - wall_time=event.wall_time, step=event.step, values=values, + wall_time=event.wall_time, + step=event.step, + values=values, ) diff --git a/tensorboard/backend/event_processing/data_provider_test.py b/tensorboard/backend/event_processing/data_provider_test.py index 8e89faee6f..b9270600c4 100644 --- a/tensorboard/backend/event_processing/data_provider_test.py +++ b/tensorboard/backend/event_processing/data_provider_test.py @@ -377,7 +377,8 @@ def test_read_tensors(self): ) run_tag_filter = base_provider.RunTagFilter( - runs=["lebesgue"], tags=["uniform", "bimodal"], + runs=["lebesgue"], + tags=["uniform", "bimodal"], ) result = provider.read_tensors( self.ctx, diff --git a/tensorboard/backend/event_processing/event_accumulator.py b/tensorboard/backend/event_processing/event_accumulator.py index 56d2be8132..5143acc0ae 100644 --- a/tensorboard/backend/event_processing/event_accumulator.py +++ b/tensorboard/backend/event_processing/event_accumulator.py @@ -778,7 +778,7 @@ def _ExpiredPerTag(value): self.most_recent_wall_time, event.step, event.wall_time, - *expired_per_type + *expired_per_type, ) logger.warning(purge_msg) diff --git a/tensorboard/backend/event_processing/event_accumulator_test.py b/tensorboard/backend/event_processing/event_accumulator_test.py index 180a562018..38e4fb0865 100644 --- a/tensorboard/backend/event_processing/event_accumulator_test.py +++ b/tensorboard/backend/event_processing/event_accumulator_test.py @@ -785,17 +785,21 @@ def testTFSummaryImage(self): accumulator.Reload() tags = [ - u"1/images/image", - u"2/images/image/0", - u"2/images/image/1", - u"3/images/image/0", - u"3/images/image/1", - u"3/images/image/2", + "1/images/image", + "2/images/image/0", + "2/images/image/1", + "3/images/image/0", + "3/images/image/1", + "3/images/image/2", ] self.assertTagsEqual( accumulator.Tags(), - {ea.IMAGES: tags, ea.GRAPH: True, ea.META_GRAPH: False,}, + { + ea.IMAGES: tags, + ea.GRAPH: True, + ea.META_GRAPH: False, + }, ) def testTFSummaryTensor(self): @@ -819,7 +823,10 @@ def testTFSummaryTensor(self): 
accumulator.Reload() self.assertTagsEqual( - accumulator.Tags(), {ea.TENSORS: ["scalar", "vector", "string"],} + accumulator.Tags(), + { + ea.TENSORS: ["scalar", "vector", "string"], + }, ) scalar_proto = accumulator.Tensors("scalar")[0].tensor_proto @@ -949,7 +956,13 @@ def testGraphFromMetaGraphBecomesAvailable(self): # Verify that we can load those events properly acc = ea.EventAccumulator(directory) acc.Reload() - self.assertTagsEqual(acc.Tags(), {ea.GRAPH: True, ea.META_GRAPH: True,}) + self.assertTagsEqual( + acc.Tags(), + { + ea.GRAPH: True, + ea.META_GRAPH: True, + }, + ) expected_graph_def = graph_pb2.GraphDef.FromString( graph.as_graph_def(add_shapes=True).SerializeToString() diff --git a/tensorboard/backend/event_processing/event_file_loader_test.py b/tensorboard/backend/event_processing/event_file_loader_test.py index d969ecb8a4..1eac652deb 100644 --- a/tensorboard/backend/event_processing/event_file_loader_test.py +++ b/tensorboard/backend/event_processing/event_file_loader_test.py @@ -139,10 +139,12 @@ def assertEventWallTimes(self, load_result, event_wall_times_in_order): transposed = list(zip(*load_result)) wall_times, events = transposed if transposed else ([], []) self.assertEqual( - list(wall_times), event_wall_times_in_order, + list(wall_times), + event_wall_times_in_order, ) self.assertEqual( - [event.wall_time for event in events], event_wall_times_in_order, + [event.wall_time for event in events], + event_wall_times_in_order, ) diff --git a/tensorboard/backend/event_processing/event_multiplexer_test.py b/tensorboard/backend/event_processing/event_multiplexer_test.py index 7c24c7eb52..5c79ac9252 100644 --- a/tensorboard/backend/event_processing/event_multiplexer_test.py +++ b/tensorboard/backend/event_processing/event_multiplexer_test.py @@ -52,7 +52,10 @@ def __init__(self, path): self._path = path self.reload_called = False self._plugin_to_tag_to_content = { - "baz_plugin": {"foo": "foo_content", "bar": "bar_content",} + "baz_plugin": { + "foo": "foo_content", + "bar": "bar_content", + } } def Tags(self): diff --git a/tensorboard/backend/event_processing/io_wrapper_test.py b/tensorboard/backend/event_processing/io_wrapper_test.py index e9ac2faff2..d3e6aa1289 100644 --- a/tensorboard/backend/event_processing/io_wrapper_test.py +++ b/tensorboard/backend/event_processing/io_wrapper_test.py @@ -123,20 +123,72 @@ def testListRecursivelyViaGlobbing(self): expected = [ [ "", - ["foo", "bar", "a.tfevents.1", "model.ckpt", "quuz", "waldo",], + [ + "foo", + "bar", + "a.tfevents.1", + "model.ckpt", + "quuz", + "waldo", + ], + ], + [ + "bar", + [ + "b.tfevents.1", + "red_herring.txt", + "baz", + "quux", + ], + ], + [ + "bar/baz", + [ + "c.tfevents.1", + "d.tfevents.1", + ], ], - ["bar", ["b.tfevents.1", "red_herring.txt", "baz", "quux",]], - ["bar/baz", ["c.tfevents.1", "d.tfevents.1",]], [ "bar/quux", - ["some_flume_output.txt", "some_more_flume_output.txt",], + [ + "some_flume_output.txt", + "some_more_flume_output.txt", + ], + ], + [ + "quuz", + [ + "e.tfevents.1", + "garply", + ], + ], + [ + "quuz/garply", + [ + "f.tfevents.1", + "corge", + "grault", + ], ], - ["quuz", ["e.tfevents.1", "garply",]], - ["quuz/garply", ["f.tfevents.1", "corge", "grault",]], ["quuz/garply/corge", ["g.tfevents.1"]], - ["quuz/garply/grault", ["h.tfevents.1",]], - ["waldo", ["fred",]], - ["waldo/fred", ["i.tfevents.1",]], + [ + "quuz/garply/grault", + [ + "h.tfevents.1", + ], + ], + [ + "waldo", + [ + "fred", + ], + ], + [ + "waldo/fred", + [ + "i.tfevents.1", + ], + ], ] for pair in expected: # 
If this is not the top-level directory, prepend the high-level @@ -169,8 +221,19 @@ def skipTestListRecursivelyViaGlobbingForPathWithGlobCharacters(self): open(os.path.join(temp_dir, file_name), "w").close() expected = [ - ["", ["a.tfevents.1", "subdirectory",]], - ["subdirectory", ["b.tfevents.1",]], + [ + "", + [ + "a.tfevents.1", + "subdirectory", + ], + ], + [ + "subdirectory", + [ + "b.tfevents.1", + ], + ], # The contents of the bar subdirectory should be excluded from # this listing because the * character should have been escaped. ] @@ -188,20 +251,66 @@ def testListRecursivelyViaWalking(self): temp_dir = tempfile.mkdtemp(prefix=self.get_temp_dir()) self._CreateDeepDirectoryStructure(temp_dir) expected = [ - ["", ["a.tfevents.1", "model.ckpt",]], + [ + "", + [ + "a.tfevents.1", + "model.ckpt", + ], + ], ["foo", []], - ["bar", ["b.tfevents.1", "red_herring.txt",]], - ["bar/baz", ["c.tfevents.1", "d.tfevents.1",]], + [ + "bar", + [ + "b.tfevents.1", + "red_herring.txt", + ], + ], + [ + "bar/baz", + [ + "c.tfevents.1", + "d.tfevents.1", + ], + ], [ "bar/quux", - ["some_flume_output.txt", "some_more_flume_output.txt",], + [ + "some_flume_output.txt", + "some_more_flume_output.txt", + ], + ], + [ + "quuz", + [ + "e.tfevents.1", + ], + ], + [ + "quuz/garply", + [ + "f.tfevents.1", + ], + ], + [ + "quuz/garply/corge", + [ + "g.tfevents.1", + ], + ], + [ + "quuz/garply/grault", + [ + "h.tfevents.1", + ], ], - ["quuz", ["e.tfevents.1",]], - ["quuz/garply", ["f.tfevents.1",]], - ["quuz/garply/corge", ["g.tfevents.1",]], - ["quuz/garply/grault", ["h.tfevents.1",]], ["waldo", []], - ["waldo/fred", ["i.tfevents.1",]], + [ + "waldo/fred", + [ + "i.tfevents.1", + ], + ], ] for pair in expected: # If this is not the top-level directory, prepend the high-level diff --git a/tensorboard/backend/event_processing/plugin_event_accumulator_test.py b/tensorboard/backend/event_processing/plugin_event_accumulator_test.py index f315695a06..f67df572d5 100644 --- a/tensorboard/backend/event_processing/plugin_event_accumulator_test.py +++ b/tensorboard/backend/event_processing/plugin_event_accumulator_test.py @@ -172,7 +172,12 @@ def testReload(self): gen.AddScalarTensor("s1", wall_time=1, step=10, value=50) gen.AddScalarTensor("s2", wall_time=1, step=10, value=80) acc.Reload() - self.assertTagsEqual(acc.Tags(), {ea.TENSORS: ["s1", "s2"],}) + self.assertTagsEqual( + acc.Tags(), + { + ea.TENSORS: ["s1", "s2"], + }, + ) def testKeyError(self): """KeyError should be raised when accessing non-existing keys.""" @@ -193,7 +198,12 @@ def testNonValueEvents(self): gen.AddScalarTensor("s3", wall_time=3, step=100, value=1) acc.Reload() - self.assertTagsEqual(acc.Tags(), {ea.TENSORS: ["s1", "s3"],}) + self.assertTagsEqual( + acc.Tags(), + { + ea.TENSORS: ["s1", "s3"], + }, + ) def testExpiredDataDiscardedAfterRestartForFileVersionLessThan2(self): """Tests that events are discarded after a restart is detected. 
@@ -401,12 +411,16 @@ def testNewStyleScalarSummary(self): tags = [ graph_metadata.RUN_GRAPH_NAME, - u"accuracy/scalar_summary", - u"xent/scalar_summary", + "accuracy/scalar_summary", + "xent/scalar_summary", ] self.assertTagsEqual( accumulator.Tags(), - {ea.TENSORS: tags, ea.GRAPH: True, ea.META_GRAPH: False,}, + { + ea.TENSORS: tags, + ea.GRAPH: True, + ea.META_GRAPH: False, + }, ) self.assertItemsEqual( @@ -445,13 +459,17 @@ def testNewStyleAudioSummary(self): tags = [ graph_metadata.RUN_GRAPH_NAME, - u"1/one/audio_summary", - u"2/two/audio_summary", - u"3/three/audio_summary", + "1/one/audio_summary", + "2/two/audio_summary", + "3/three/audio_summary", ] self.assertTagsEqual( accumulator.Tags(), - {ea.TENSORS: tags, ea.GRAPH: True, ea.META_GRAPH: False,}, + { + ea.TENSORS: tags, + ea.GRAPH: True, + ea.META_GRAPH: False, + }, ) self.assertItemsEqual( @@ -488,13 +506,17 @@ def testNewStyleImageSummary(self): tags = [ graph_metadata.RUN_GRAPH_NAME, - u"1/images/image_summary", - u"2/images/image_summary", - u"3/images/image_summary", + "1/images/image_summary", + "2/images/image_summary", + "3/images/image_summary", ] self.assertTagsEqual( accumulator.Tags(), - {ea.TENSORS: tags, ea.GRAPH: True, ea.META_GRAPH: False,}, + { + ea.TENSORS: tags, + ea.GRAPH: True, + ea.META_GRAPH: False, + }, ) self.assertItemsEqual( @@ -521,7 +543,10 @@ def testTFSummaryTensor(self): accumulator.Reload() self.assertTagsEqual( - accumulator.Tags(), {ea.TENSORS: ["scalar", "vector", "string"],} + accumulator.Tags(), + { + ea.TENSORS: ["scalar", "vector", "string"], + }, ) scalar_proto = accumulator.Tensors("scalar")[0].tensor_proto @@ -742,7 +767,13 @@ def testGraphFromMetaGraphBecomesAvailable(self): # Verify that we can load those events properly acc = ea.EventAccumulator(directory) acc.Reload() - self.assertTagsEqual(acc.Tags(), {ea.GRAPH: True, ea.META_GRAPH: True,}) + self.assertTagsEqual( + acc.Tags(), + { + ea.GRAPH: True, + ea.META_GRAPH: True, + }, + ) expected_graph_def = graph_pb2.GraphDef.FromString( graph.as_graph_def(add_shapes=True).SerializeToString() diff --git a/tensorboard/backend/event_processing/plugin_event_multiplexer_test.py b/tensorboard/backend/event_processing/plugin_event_multiplexer_test.py index e6bd4953d6..6259286e83 100644 --- a/tensorboard/backend/event_processing/plugin_event_multiplexer_test.py +++ b/tensorboard/backend/event_processing/plugin_event_multiplexer_test.py @@ -57,7 +57,10 @@ def __init__(self, path): self._path = path self.reload_called = False self._plugin_to_tag_to_content = { - "baz_plugin": {"foo": "foo_content", "bar": "bar_content",} + "baz_plugin": { + "foo": "foo_content", + "bar": "bar_content", + } } def Tags(self): diff --git a/tensorboard/backend/http_util.py b/tensorboard/backend/http_util.py index 28838cd2a7..64c446ae1b 100644 --- a/tensorboard/backend/http_util.py +++ b/tensorboard/backend/http_util.py @@ -74,7 +74,12 @@ ] ) -_JSON_MIMETYPES = set(["application/json", "application/json+protobuf",]) +_JSON_MIMETYPES = set( + [ + "application/json", + "application/json+protobuf", + ] +) # Do not support xhtml for now. _HTML_MIMETYPE = "text/html" @@ -232,7 +237,7 @@ def Respond( "data:", # used by What-If tool for image sprites. "blob:", - *_CSP_IMG_DOMAINS_WHITELIST + *_CSP_IMG_DOMAINS_WHITELIST, ), "object-src 'none'", "style-src %s" @@ -243,7 +248,7 @@ def Respond( "data:", # inline styles: Polymer templates + d3 uses inline styles. 
"'unsafe-inline'", - *_CSP_STYLE_DOMAINS_WHITELIST + *_CSP_STYLE_DOMAINS_WHITELIST, ), "connect-src %s" % _create_csp_string("'self'", *_CSP_CONNECT_DOMAINS_WHITELIST), diff --git a/tensorboard/backend/json_util_test.py b/tensorboard/backend/json_util_test.py index e0d030be52..32d4529965 100644 --- a/tensorboard/backend/json_util_test.py +++ b/tensorboard/backend/json_util_test.py @@ -47,7 +47,12 @@ def testWrapsObjectKeys(self): def testWrapsInListsAndTuples(self): self._assertWrapsAs([_INFINITY], ["Infinity"]) # map() returns a list even if the argument is a tuple. - self._assertWrapsAs((_INFINITY,), ["Infinity",]) + self._assertWrapsAs( + (_INFINITY,), + [ + "Infinity", + ], + ) def testWrapsRecursively(self): self._assertWrapsAs({"x": [_INFINITY]}, {"x": ["Infinity"]}) @@ -69,7 +74,7 @@ def testSet_turnsIntoSortedList(self): def testByteString_turnsIntoUnicodeString(self): self.assertEqual( - json_util.Cleanse(b"\xc2\xa3"), u"\u00a3" + json_util.Cleanse(b"\xc2\xa3"), "\u00a3" ) # is # sterling diff --git a/tensorboard/backend/security_validator_test.py b/tensorboard/backend/security_validator_test.py index 1bd2b54a7d..e5e6b560e6 100644 --- a/tensorboard/backend/security_validator_test.py +++ b/tensorboard/backend/security_validator_test.py @@ -56,7 +56,9 @@ class SecurityValidatorMiddlewareTest(tb_test.TestCase): """Tests for `SecurityValidatorMiddleware`.""" def make_request_and_maybe_assert_warn( - self, headers, expected_warn_substr, + self, + headers, + expected_warn_substr, ): @werkzeug.Request.application def _simple_app(req): @@ -74,7 +76,8 @@ def _simple_app(req): mock_warn.assert_called_with(_WARN_PREFIX + expected_warn_substr) def make_request_and_assert_no_warn( - self, headers, + self, + headers, ): self.make_request_and_maybe_assert_warn(headers, None) diff --git a/tensorboard/compat/tensorflow_stub/io/gfile_s3_test.py b/tensorboard/compat/tensorflow_stub/io/gfile_s3_test.py index 67a9dbf18c..31086796ee 100644 --- a/tensorboard/compat/tensorflow_stub/io/gfile_s3_test.py +++ b/tensorboard/compat/tensorflow_stub/io/gfile_s3_test.py @@ -110,21 +110,67 @@ def testWalk(self): temp_dir = self._CreateDeepS3Structure() self._CreateDeepS3Structure(temp_dir) expected = [ - ["", ["a.tfevents.1", "model.ckpt",]], + [ + "", + [ + "a.tfevents.1", + "model.ckpt", + ], + ], # Empty directory not returned # ['foo', []], - ["bar", ["b.tfevents.1", "red_herring.txt",]], - ["bar/baz", ["c.tfevents.1", "d.tfevents.1",]], + [ + "bar", + [ + "b.tfevents.1", + "red_herring.txt", + ], + ], + [ + "bar/baz", + [ + "c.tfevents.1", + "d.tfevents.1", + ], + ], [ "bar/quux", - ["some_flume_output.txt", "some_more_flume_output.txt",], + [ + "some_flume_output.txt", + "some_more_flume_output.txt", + ], + ], + [ + "quuz", + [ + "e.tfevents.1", + ], + ], + [ + "quuz/garply", + [ + "f.tfevents.1", + ], + ], + [ + "quuz/garply/corge", + [ + "g.tfevents.1", + ], + ], + [ + "quuz/garply/grault", + [ + "h.tfevents.1", + ], ], - ["quuz", ["e.tfevents.1",]], - ["quuz/garply", ["f.tfevents.1",]], - ["quuz/garply/corge", ["g.tfevents.1",]], - ["quuz/garply/grault", ["h.tfevents.1",]], ["waldo", []], - ["waldo/fred", ["i.tfevents.1",]], + [ + "waldo/fred", + [ + "i.tfevents.1", + ], + ], ] for pair in expected: # If this is not the top-level directory, prepend the high-level @@ -156,10 +202,8 @@ def testRead(self): @mock_s3 def testReadLines(self): - ckpt_lines = ( - [u"\n"] + [u"line {}\n".format(i) for i in range(10)] + [u" "] - ) - ckpt_content = u"".join(ckpt_lines) + ckpt_lines = ["\n"] + ["line {}\n".format(i) 
for i in range(10)] + [" "] + ckpt_content = "".join(ckpt_lines) temp_dir = self._CreateDeepS3Structure(ckpt_content=ckpt_content) ckpt_path = self._PathJoin(temp_dir, "model.ckpt") with gfile.GFile(ckpt_path, "r") as f: @@ -193,7 +237,7 @@ def testReadWithOffset(self): def testWrite(self): temp_dir = self._CreateDeepS3Structure() ckpt_path = os.path.join(temp_dir, "model2.ckpt") - ckpt_content = u"asdfasdfasdffoobarbuzz" + ckpt_content = "asdfasdfasdffoobarbuzz" with gfile.GFile(ckpt_path, "w") as f: f.write(ckpt_content) with gfile.GFile(ckpt_path, "r") as f: @@ -204,9 +248,9 @@ def testWrite(self): def testOverwrite(self): temp_dir = self._CreateDeepS3Structure() ckpt_path = os.path.join(temp_dir, "model2.ckpt") - ckpt_content = u"asdfasdfasdffoobarbuzz" + ckpt_content = "asdfasdfasdffoobarbuzz" with gfile.GFile(ckpt_path, "w") as f: - f.write(u"original") + f.write("original") with gfile.GFile(ckpt_path, "w") as f: f.write(ckpt_content) with gfile.GFile(ckpt_path, "r") as f: @@ -217,7 +261,7 @@ def testOverwrite(self): def testWriteMultiple(self): temp_dir = self._CreateDeepS3Structure() ckpt_path = os.path.join(temp_dir, "model2.ckpt") - ckpt_content = u"asdfasdfasdffoobarbuzz" * 5 + ckpt_content = "asdfasdfasdffoobarbuzz" * 5 with gfile.GFile(ckpt_path, "w") as f: for i in range(0, len(ckpt_content), 3): f.write(ckpt_content[i : i + 3]) @@ -232,7 +276,7 @@ def testWriteMultiple(self): def testWriteEmpty(self): temp_dir = self._CreateDeepS3Structure() ckpt_path = os.path.join(temp_dir, "model2.ckpt") - ckpt_content = u"" + ckpt_content = "" with gfile.GFile(ckpt_path, "w") as f: f.write(ckpt_content) with gfile.GFile(ckpt_path, "r") as f: diff --git a/tensorboard/compat/tensorflow_stub/io/gfile_test.py b/tensorboard/compat/tensorflow_stub/io/gfile_test.py index af2e825151..3ae2f51a77 100644 --- a/tensorboard/compat/tensorflow_stub/io/gfile_test.py +++ b/tensorboard/compat/tensorflow_stub/io/gfile_test.py @@ -90,20 +90,66 @@ def testWalk(self): temp_dir = self.get_temp_dir() self._CreateDeepDirectoryStructure(temp_dir) expected = [ - ["", ["a.tfevents.1", "model.ckpt",]], + [ + "", + [ + "a.tfevents.1", + "model.ckpt", + ], + ], ["foo", []], - ["bar", ["b.tfevents.1", "red_herring.txt",]], - ["bar/baz", ["c.tfevents.1", "d.tfevents.1",]], + [ + "bar", + [ + "b.tfevents.1", + "red_herring.txt", + ], + ], + [ + "bar/baz", + [ + "c.tfevents.1", + "d.tfevents.1", + ], + ], [ "bar/quux", - ["some_flume_output.txt", "some_more_flume_output.txt",], + [ + "some_flume_output.txt", + "some_more_flume_output.txt", + ], + ], + [ + "quuz", + [ + "e.tfevents.1", + ], + ], + [ + "quuz/garply", + [ + "f.tfevents.1", + ], + ], + [ + "quuz/garply/corge", + [ + "g.tfevents.1", + ], + ], + [ + "quuz/garply/grault", + [ + "h.tfevents.1", + ], ], - ["quuz", ["e.tfevents.1",]], - ["quuz/garply", ["f.tfevents.1",]], - ["quuz/garply/corge", ["g.tfevents.1",]], - ["quuz/garply/grault", ["h.tfevents.1",]], ["waldo", []], - ["waldo/fred", ["i.tfevents.1",]], + [ + "waldo/fred", + [ + "i.tfevents.1", + ], + ], ] for pair in expected: # If this is not the top-level directory, prepend the high-level @@ -151,15 +197,15 @@ def testReadLines(self): # the underlying byte offsets (counting \r). Multibyte characters would # similarly cause desynchronization. 
raw_ckpt_lines = ( - [u"\r\n"] + [u"line {}\r\n".format(i) for i in range(10)] + [u" "] + ["\r\n"] + ["line {}\r\n".format(i) for i in range(10)] + [" "] ) expected_ckpt_lines = ( # without \r - [u"\n"] + [u"line {}\n".format(i) for i in range(10)] + [u" "] + ["\n"] + ["line {}\n".format(i) for i in range(10)] + [" "] ) # Write out newlines as given (i.e., \r\n) regardless of OS, so as to # test translation on read. with io.open(ckpt_path, "w", newline="") as f: - data = u"".join(raw_ckpt_lines) + data = "".join(raw_ckpt_lines) f.write(data) with gfile.GFile(ckpt_path, "r") as f: f.buff_chunk_size = 4 # Test buffering by reducing chunk size @@ -194,7 +240,7 @@ def testWrite(self): temp_dir = self.get_temp_dir() self._CreateDeepDirectoryStructure(temp_dir) ckpt_path = os.path.join(temp_dir, "model2.ckpt") - ckpt_content = u"asdfasdfasdffoobarbuzz" + ckpt_content = "asdfasdfasdffoobarbuzz" with gfile.GFile(ckpt_path, "w") as f: f.write(ckpt_content) with open(ckpt_path, "r") as f: @@ -205,9 +251,9 @@ def testOverwrite(self): temp_dir = self.get_temp_dir() self._CreateDeepDirectoryStructure(temp_dir) ckpt_path = os.path.join(temp_dir, "model2.ckpt") - ckpt_content = u"asdfasdfasdffoobarbuzz" + ckpt_content = "asdfasdfasdffoobarbuzz" with gfile.GFile(ckpt_path, "w") as f: - f.write(u"original") + f.write("original") with gfile.GFile(ckpt_path, "w") as f: f.write(ckpt_content) with open(ckpt_path, "r") as f: @@ -218,7 +264,7 @@ def testWriteMultiple(self): temp_dir = self.get_temp_dir() self._CreateDeepDirectoryStructure(temp_dir) ckpt_path = os.path.join(temp_dir, "model2.ckpt") - ckpt_content = u"asdfasdfasdffoobarbuzz" * 5 + ckpt_content = "asdfasdfasdffoobarbuzz" * 5 with gfile.GFile(ckpt_path, "w") as f: for i in range(0, len(ckpt_content), 3): f.write(ckpt_content[i : i + 3]) @@ -233,7 +279,7 @@ def testWriteEmpty(self): temp_dir = self.get_temp_dir() self._CreateDeepDirectoryStructure(temp_dir) ckpt_path = os.path.join(temp_dir, "model2.ckpt") - ckpt_content = u"" + ckpt_content = "" with gfile.GFile(ckpt_path, "w") as f: f.write(ckpt_content) with open(ckpt_path, "r") as f:
diff --git a/tensorboard/compat/tensorflow_stub/pywrap_tensorflow.py b/tensorboard/compat/tensorflow_stub/pywrap_tensorflow.py index f5fe7d3050..8b743f7dca 100644 --- a/tensorboard/compat/tensorflow_stub/pywrap_tensorflow.py +++ b/tensorboard/compat/tensorflow_stub/pywrap_tensorflow.py @@ -244,7 +244,9 @@ def GetNext(self): crc_event = struct.unpack("<I", crc_event_str) if event_crc_calc != crc_event[0]: raise errors.DataLossError( - None, None, "{} failed event crc32 check".format(self.filename) + None, + None, + "{} failed event crc32 check".format(self.filename), )
diff --git a/tensorboard/pip_package/requirements_dev.txt b/tensorboard/pip_package/requirements_dev.txt --- a/tensorboard/pip_package/requirements_dev.txt +++ b/tensorboard/pip_package/requirements_dev.txt -black==19.10b0; python_version >= "3.6" +black==20.8b1; python_version >= "3.6" flake8==3.7.8 yamllint==1.17.0
diff --git a/tensorboard/pip_package/setup.py index f4db0fa22f..530174499b 100644 --- a/tensorboard/pip_package/setup.py +++ b/tensorboard/pip_package/setup.py @@ -55,7 +55,9 @@ def get_readme(): ], }, package_data={ - "tensorboard": ["webfiles.zip",], + "tensorboard": [ + "webfiles.zip", + ], # Must keep this in sync with tf_projector_plugin:projector_assets "tensorboard.plugins.projector": [ "tf_projector_plugin/index.js",
diff --git a/tensorboard/plugin_util.py b/tensorboard/plugin_util.py index d51dafcd37..15a2e14eda 100644 --- a/tensorboard/plugin_util.py +++ b/tensorboard/plugin_util.py @@ -133,7 +133,7 @@ def markdowns_to_safe_html(markdown_strings, combine): source_decoded = source.decode("utf-8") # Remove null bytes and warn if there were any, since it probably means # we were given a bad encoding.
- source = source_decoded.replace(u"\x00", u"") + source = source_decoded.replace("\x00", "") total_null_bytes += len(source_decoded) - len(source) unsafe_html = _MARKDOWN_STORE.markdown.convert(source) unsafe_htmls.append(unsafe_html) diff --git a/tensorboard/plugin_util_test.py b/tensorboard/plugin_util_test.py index 41aa902f02..80e9d4255d 100644 --- a/tensorboard/plugin_util_test.py +++ b/tensorboard/plugin_util_test.py @@ -32,20 +32,20 @@ def _test(self, markdown_string, expected): self.assertEqual(expected, actual) def test_empty_input(self): - self._test(u"", u"") + self._test("", "") def test_basic_formatting(self): self._test( - u"# _Hello_, **world!**\n\n" + "# _Hello_, **world!**\n\n" "Check out [my website](http://example.com)!", - u"

Hello, world!

\n" + "

Hello, world!

\n" '

Check out my website!

', ) def test_table_formatting(self): self._test( textwrap.dedent( - u"""\ + """\ Here is some data: TensorBoard usage | Happiness @@ -58,7 +58,7 @@ def test_table_formatting(self): """ ), textwrap.dedent( - u"""\ + """\

Here is some data:

@@ -89,39 +89,39 @@ def test_table_formatting(self): def test_whitelisted_tags_and_attributes_allowed(self): s = ( - u'Check out ' + 'Check out ' "my website!" ) - self._test(s, u"

%s

" % s) + self._test(s, "

%s

" % s) def test_arbitrary_tags_and_attributes_removed(self): self._test( - u"We should bring back the blink tag; " + "We should bring back the blink tag; " '' "sign the petition!", - u"

We should bring back the " + "

We should bring back the " "<blink>blink tag</blink>; " 'sign the petition!

', ) def test_javascript_hrefs_sanitized(self): self._test( - u'A sketchy link for you', - u"

A sketchy link for you

", + 'A sketchy link for you', + "

A sketchy link for you

", ) def test_byte_strings_interpreted_as_utf8(self): - s = u"> Look\u2014some UTF-8!".encode("utf-8") + s = "> Look\u2014some UTF-8!".encode("utf-8") assert isinstance(s, six.binary_type), (type(s), six.binary_type) self._test( - s, u"
\n

Look\u2014some UTF-8!

\n
" + s, "
\n

Look\u2014some UTF-8!

\n
" ) def test_unicode_strings_passed_through(self): - s = u"> Look\u2014some UTF-8!" + s = "> Look\u2014some UTF-8!" assert not isinstance(s, six.binary_type), (type(s), six.binary_type) self._test( - s, u"
\n

Look\u2014some UTF-8!

\n
" + s, "
\n

Look\u2014some UTF-8!

\n
" ) def test_null_bytes_stripped_before_markdown_processing(self): @@ -131,11 +131,11 @@ def test_null_bytes_stripped_before_markdown_processing(self): # interpretation to avoid affecting output (e.g. middle-word underscores # would generate erroneous tags like "underscore") and add an # HTML comment with a warning. - s = u"un_der_score".encode("utf-32-le") + s = "un_der_score".encode("utf-32-le") # UTF-32 encoding of ASCII will have 3 null bytes per char. 36 = 3 * 12. self._test( s, - u"\n" "

un_der_score

", ) diff --git a/tensorboard/plugins/audio/audio_plugin.py b/tensorboard/plugins/audio/audio_plugin.py index 00f84bae5a..8171f03b06 100644 --- a/tensorboard/plugins/audio/audio_plugin.py +++ b/tensorboard/plugins/audio/audio_plugin.py @@ -94,7 +94,9 @@ def _index_impl(self, ctx, experiment): dictionary for `"minibatch_input"` will contain `"samples": 10`. """ mapping = self._data_provider.list_blob_sequences( - ctx, experiment_id=experiment, plugin_name=metadata.PLUGIN_NAME, + ctx, + experiment_id=experiment, + plugin_name=metadata.PLUGIN_NAME, ) result = {run: {} for run in mapping} for (run, tag_to_time_series) in mapping.items(): @@ -190,7 +192,9 @@ def _get_mime_type(self, ctx, experiment, run, tag): # times) to `/tags` (called few times) to reduce data provider # calls. mapping = self._data_provider.list_blob_sequences( - ctx, experiment_id=experiment, plugin_name=metadata.PLUGIN_NAME, + ctx, + experiment_id=experiment, + plugin_name=metadata.PLUGIN_NAME, ) time_series = mapping.get(run, {}).get(tag, None) if time_series is None: diff --git a/tensorboard/plugins/audio/audio_plugin_test.py b/tensorboard/plugins/audio/audio_plugin_test.py index d25f06408c..bd3a5bd1dc 100644 --- a/tensorboard/plugins/audio/audio_plugin_test.py +++ b/tensorboard/plugins/audio/audio_plugin_test.py @@ -117,7 +117,10 @@ def setUp(self): # Start a server with the plugin. multiplexer = event_multiplexer.EventMultiplexer( - {"foo": foo_directory, "bar": bar_directory,} + { + "foo": foo_directory, + "bar": bar_directory, + } ) multiplexer.Reload() provider = data_provider.MultiplexerDataProvider( diff --git a/tensorboard/plugins/base_plugin.py b/tensorboard/plugins/base_plugin.py index 5c5e20991b..e9a4f22ba5 100644 --- a/tensorboard/plugins/base_plugin.py +++ b/tensorboard/plugins/base_plugin.py @@ -122,7 +122,7 @@ def __init__( es_module_path=None, remove_dom=None, tab_name=None, - is_ng_component=None + is_ng_component=None, ): """Creates a `FrontendMetadata` value. @@ -254,7 +254,7 @@ def __init__( multiplexer=None, plugin_name_to_instance=None, sampling_hints=None, - window_title=None + window_title=None, ): """Instantiates magic container. diff --git a/tensorboard/plugins/core/core_plugin.py b/tensorboard/plugins/core/core_plugin.py index 42edcdc2b6..8199e6a1f4 100644 --- a/tensorboard/plugins/core/core_plugin.py +++ b/tensorboard/plugins/core/core_plugin.py @@ -145,7 +145,11 @@ def _serve_environment(self, request): "creation_time": experiment_metadata.creation_time, } ) - return http_util.Respond(request, environment, "application/json",) + return http_util.Respond( + request, + environment, + "application/json", + ) @wrappers.Request.application def _serve_logdir(self, request): diff --git a/tensorboard/plugins/core/core_plugin_test.py b/tensorboard/plugins/core/core_plugin_test.py index 918efb3109..2394553d77 100644 --- a/tensorboard/plugins/core/core_plugin_test.py +++ b/tensorboard/plugins/core/core_plugin_test.py @@ -347,7 +347,8 @@ def FirstEventTimestamp_stub(run_name): # Add one run: it should come last. self._add_run("avocado") self.assertEqual( - self._get_json(self.server, "/data/runs"), ["run1", "avocado"], + self._get_json(self.server, "/data/runs"), + ["run1", "avocado"], ) # Add another run: it should come last, too. 
diff --git a/tensorboard/plugins/custom_scalar/custom_scalars_plugin_test.py b/tensorboard/plugins/custom_scalar/custom_scalars_plugin_test.py index edfe7e943b..7ae6a03a47 100644 --- a/tensorboard/plugins/custom_scalar/custom_scalars_plugin_test.py +++ b/tensorboard/plugins/custom_scalar/custom_scalars_plugin_test.py @@ -173,8 +173,8 @@ def createPlugin(self, logdir): plugin_name_to_instance=plugin_name_to_instance, ) scalars_plugin_instance = scalars_plugin.ScalarsPlugin(context) - custom_scalars_plugin_instance = custom_scalars_plugin.CustomScalarsPlugin( - context + custom_scalars_plugin_instance = ( + custom_scalars_plugin.CustomScalarsPlugin(context) ) plugin_instances = [ scalars_plugin_instance, diff --git a/tensorboard/plugins/debugger_v2/debug_data_provider.py b/tensorboard/plugins/debugger_v2/debug_data_provider.py index 73649afb99..d92a8cafd6 100644 --- a/tensorboard/plugins/debugger_v2/debug_data_provider.py +++ b/tensorboard/plugins/debugger_v2/debug_data_provider.py @@ -345,7 +345,8 @@ def graph_info_run_tag_filter(run, graph_id): if not graph_id: raise ValueError("graph_id must not be None or empty.") return provider.RunTagFilter( - runs=[run], tags=["%s_%s" % (GRAPH_INFO_BLOB_TAG_PREFIX, graph_id)], + runs=[run], + tags=["%s_%s" % (GRAPH_INFO_BLOB_TAG_PREFIX, graph_id)], ) @@ -402,7 +403,8 @@ def source_file_run_tag_filter(run, index): `RunTagFilter` for accessing the content of the source file. """ return provider.RunTagFilter( - runs=[run], tags=["%s_%d" % (SOURCE_FILE_BLOB_TAG_PREFIX, index)], + runs=[run], + tags=["%s_%d" % (SOURCE_FILE_BLOB_TAG_PREFIX, index)], ) @@ -515,7 +517,7 @@ def read_scalars( experiment_id, plugin_name, downsample=None, - run_tag_filter=None + run_tag_filter=None, ): del experiment_id, plugin_name, downsample, run_tag_filter raise TypeError("Debugger V2 DataProvider doesn't support scalars.") @@ -534,7 +536,7 @@ def read_blob_sequences( experiment_id, plugin_name, downsample=None, - run_tag_filter=None + run_tag_filter=None, ): del experiment_id, downsample # Unused. 
if plugin_name != PLUGIN_NAME: @@ -555,19 +557,22 @@ def read_blob_sequences( continue output[run] = dict() for tag in run_tag_filter.tags: - if tag.startswith( - ( - ALERTS_BLOB_TAG_PREFIX, - EXECUTION_DIGESTS_BLOB_TAG_PREFIX, - EXECUTION_DATA_BLOB_TAG_PREFIX, - GRAPH_EXECUTION_DIGESTS_BLOB_TAG_PREFIX, - GRAPH_EXECUTION_DATA_BLOB_TAG_PREFIX, - GRAPH_INFO_BLOB_TAG_PREFIX, - GRAPH_OP_INFO_BLOB_TAG_PREFIX, - SOURCE_FILE_BLOB_TAG_PREFIX, - STACK_FRAMES_BLOB_TAG_PREFIX, + if ( + tag.startswith( + ( + ALERTS_BLOB_TAG_PREFIX, + EXECUTION_DIGESTS_BLOB_TAG_PREFIX, + EXECUTION_DATA_BLOB_TAG_PREFIX, + GRAPH_EXECUTION_DIGESTS_BLOB_TAG_PREFIX, + GRAPH_EXECUTION_DATA_BLOB_TAG_PREFIX, + GRAPH_INFO_BLOB_TAG_PREFIX, + GRAPH_OP_INFO_BLOB_TAG_PREFIX, + SOURCE_FILE_BLOB_TAG_PREFIX, + STACK_FRAMES_BLOB_TAG_PREFIX, + ) ) - ) or tag in (SOURCE_FILE_LIST_BLOB_TAG,): + or tag in (SOURCE_FILE_LIST_BLOB_TAG,) + ): output[run][tag] = [ provider.BlobReference(blob_key="%s.%s" % (tag, run)) ] diff --git a/tensorboard/plugins/debugger_v2/debugger_v2_plugin.py b/tensorboard/plugins/debugger_v2/debugger_v2_plugin.py index 45791d2a59..e1e0e69081 100644 --- a/tensorboard/plugins/debugger_v2/debugger_v2_plugin.py +++ b/tensorboard/plugins/debugger_v2/debugger_v2_plugin.py @@ -29,7 +29,10 @@ def _error_response(request, error_message): return http_util.Respond( - request, {"error": error_message}, "application/json", code=400, + request, + {"error": error_message}, + "application/json", + code=400, ) @@ -197,8 +200,10 @@ def serve_graph_execution_digests(self, request): return _missing_run_error_response(request) begin = int(request.args.get("begin", "0")) end = int(request.args.get("end", "-1")) - run_tag_filter = debug_data_provider.graph_execution_digest_run_tag_filter( - run, begin, end + run_tag_filter = ( + debug_data_provider.graph_execution_digest_run_tag_filter( + run, begin, end + ) ) blob_sequences = self._data_provider.read_blob_sequences( experiment_id=experiment, @@ -234,8 +239,10 @@ def serve_graph_execution_data(self, request): return _missing_run_error_response(request) begin = int(request.args.get("begin", "0")) end = int(request.args.get("end", "-1")) - run_tag_filter = debug_data_provider.graph_execution_data_run_tag_filter( - run, begin, end + run_tag_filter = ( + debug_data_provider.graph_execution_data_run_tag_filter( + run, begin, end + ) ) blob_sequences = self._data_provider.read_blob_sequences( experiment_id=experiment, diff --git a/tensorboard/plugins/debugger_v2/debugger_v2_plugin_test.py b/tensorboard/plugins/debugger_v2/debugger_v2_plugin_test.py index 9cf244de16..b9123b5e42 100644 --- a/tensorboard/plugins/debugger_v2/debugger_v2_plugin_test.py +++ b/tensorboard/plugins/debugger_v2/debugger_v2_plugin_test.py @@ -292,7 +292,9 @@ def testGetAlertNumberOnlyWhenAlertExistsCurtHealthMode(self): "begin": 0, "end": 0, "num_alerts": 3, - "alerts_breakdown": {"InfNanAlert": 3,}, + "alerts_breakdown": { + "InfNanAlert": 3, + }, "per_type_alert_limit": 1000, "alert_type": None, "alerts": [], @@ -548,7 +550,12 @@ def testServeExecutionDigestsWithEqualBeginAndEnd(self): data = json.loads(response.get_data()) self.assertEqual( data, - {"begin": 0, "end": 0, "num_digests": 3, "execution_digests": [],}, + { + "begin": 0, + "end": 0, + "num_digests": 3, + "execution_digests": [], + }, ) def testServeExecutionDigestsWithEndGreaterThanBeginFullRange(self): @@ -1396,7 +1403,13 @@ def fake_get_op_creation_digest(op_name): self.assertNotIn("input_names", data) self.assertEqual( - data["inputs"], [{"op_name": 
"add_v2_input", "output_slot": 0,}] + data["inputs"], + [ + { + "op_name": "add_v2_input", + "output_slot": 0, + } + ], ) # "data" is missing due to op lookup failure. # Check the consumer op data, which should also be None due to the # KeyError encountered during the retrieval of the data about the diff --git a/tensorboard/plugins/distribution/compressor_test.py b/tensorboard/plugins/distribution/compressor_test.py index 00ab2b3c28..700e3b7ba7 100644 --- a/tensorboard/plugins/distribution/compressor_test.py +++ b/tensorboard/plugins/distribution/compressor_test.py @@ -32,7 +32,11 @@ def test_example(self): buckets = [[0, 1, 0], [1, 2, 3], [2, 3, 0]] self.assertEqual( _make_expected_value( - (0, 0.0), (2500, 0.5), (5000, 1.0), (7500, 1.5), (10000, 3.0), + (0, 0.0), + (2500, 0.5), + (5000, 1.0), + (7500, 1.5), + (10000, 3.0), ), compressor.compress_histogram(buckets, bps), ) @@ -56,7 +60,11 @@ def test_empty(self): buckets = [[0, 1, 0], [1, 2, 0], [2, 3, 0]] self.assertEqual( _make_expected_value( - (0, 3.0), (2500, 3.0), (5000, 3.0), (7500, 3.0), (10000, 3.0), + (0, 3.0), + (2500, 3.0), + (5000, 3.0), + (7500, 3.0), + (10000, 3.0), ), compressor.compress_histogram(buckets, bps), ) diff --git a/tensorboard/plugins/graph/graph_util.py b/tensorboard/plugins/graph/graph_util.py index 879554b4d9..a3e70a16ca 100644 --- a/tensorboard/plugins/graph/graph_util.py +++ b/tensorboard/plugins/graph/graph_util.py @@ -53,7 +53,8 @@ def _add_with_prepended_names(prefix, graph_to_add, destination_graph): if new_node.op == "PartitionedCall" and new_node.attr["f"]: new_node.attr["f"].func.name = _prefixed_func_name( - prefix, new_node.attr["f"].func.name, + prefix, + new_node.attr["f"].func.name, ) for func in graph_to_add.library.function: @@ -67,10 +68,12 @@ def _add_with_prepended_names(prefix, graph_to_add, destination_graph): new_gradient = destination_graph.library.gradient.add() new_gradient.CopyFrom(gradient) new_gradient.function_name = _prefixed_func_name( - prefix, new_gradient.function_name, + prefix, + new_gradient.function_name, ) new_gradient.gradient_func = _prefixed_func_name( - prefix, new_gradient.gradient_func, + prefix, + new_gradient.gradient_func, ) @@ -118,7 +121,9 @@ def merge_graph_defs(graph_defs): raise ValueError("Cannot combine GraphDefs of different versions.") _add_with_prepended_names( - "graph_%d" % (index + 1), graph_def, dst_graph_def, + "graph_%d" % (index + 1), + graph_def, + dst_graph_def, ) return dst_graph_def diff --git a/tensorboard/plugins/graph/graph_util_test.py b/tensorboard/plugins/graph/graph_util_test.py index 281a993745..0d0f49ab0d 100644 --- a/tensorboard/plugins/graph/graph_util_test.py +++ b/tensorboard/plugins/graph/graph_util_test.py @@ -545,7 +545,8 @@ def test_merge_graph_defs_single_graph_def_no_prefix(self): ) self.assertProtoEquals( - graph_def_a, graph_util.merge_graph_defs([graph_def_a]), + graph_def_a, + graph_util.merge_graph_defs([graph_def_a]), ) diff --git a/tensorboard/plugins/graph/graphs_plugin.py b/tensorboard/plugins/graph/graphs_plugin.py index 9c9cb97834..710b9aff1b 100644 --- a/tensorboard/plugins/graph/graphs_plugin.py +++ b/tensorboard/plugins/graph/graphs_plugin.py @@ -119,7 +119,9 @@ def add_row_item(run, tag=None): if self._data_provider: mapping = self._data_provider.list_blob_sequences( - ctx, experiment_id=experiment, plugin_name=metadata.PLUGIN_NAME, + ctx, + experiment_id=experiment, + plugin_name=metadata.PLUGIN_NAME, ) for (run_name, tag_to_time_series) in six.iteritems(mapping): for tag in tag_to_time_series: diff --git 
a/tensorboard/plugins/graph/graphs_plugin_test.py b/tensorboard/plugins/graph/graphs_plugin_test.py index 26b5fff930..cd552810bb 100644 --- a/tensorboard/plugins/graph/graphs_plugin_test.py +++ b/tensorboard/plugins/graph/graphs_plugin_test.py @@ -191,7 +191,7 @@ def _get_graph(self, plugin, *args, **kwargs): context.RequestContext(), _RUN_WITH_GRAPH_WITH_METADATA[0], *args, - **kwargs + **kwargs, ) self.assertEqual(mime_type, "text/x-protobuf") return text_format.Parse(graph_pbtxt, tf.compat.v1.GraphDef()) @@ -250,7 +250,10 @@ def test_info(self, plugin): @with_runs([_RUN_WITH_GRAPH_WITH_METADATA]) def test_graph_simple(self, plugin): graph = self._get_graph( - plugin, tag=None, is_conceptual=False, experiment="eid", + plugin, + tag=None, + is_conceptual=False, + experiment="eid", ) node_names = set(node.name for node in graph.node) self.assertEqual( diff --git a/tensorboard/plugins/graph/keras_util_test.py b/tensorboard/plugins/graph/keras_util_test.py index fa20bfbf5b..925b800b2e 100644 --- a/tensorboard/plugins/graph/keras_util_test.py +++ b/tensorboard/plugins/graph/keras_util_test.py @@ -458,7 +458,10 @@ def DISABLED_test_keras_model_to_graph_def_nested_sequential_model(self): ) sub_model = tf.keras.models.Sequential( - [sub_sub_model, tf.keras.layers.Activation("relu", name="my_relu"),] + [ + sub_sub_model, + tf.keras.layers.Activation("relu", name="my_relu"), + ] ) model = tf.keras.models.Sequential( @@ -773,7 +776,9 @@ def test_keras_model_to_graph_def_functional_model_as_layer(self): tf.keras.layers.concatenate(sub_model([inputs2, inputs1])) ) model = tf.keras.models.Model( - inputs=[inputs2, inputs1], outputs=main_outputs, name="model_1", + inputs=[inputs2, inputs1], + outputs=main_outputs, + name="model_1", ) self.assertGraphDefToModel(expected_proto, model) diff --git a/tensorboard/plugins/histogram/histograms_plugin.py b/tensorboard/plugins/histogram/histograms_plugin.py index 996739f5b2..0c5952b3db 100644 --- a/tensorboard/plugins/histogram/histograms_plugin.py +++ b/tensorboard/plugins/histogram/histograms_plugin.py @@ -77,7 +77,9 @@ def index_impl(self, ctx, experiment): """Return {runName: {tagName: {displayName: ..., description: ...}}}.""" mapping = self._data_provider.list_tensors( - ctx, experiment_id=experiment, plugin_name=metadata.PLUGIN_NAME, + ctx, + experiment_id=experiment, + plugin_name=metadata.PLUGIN_NAME, ) result = {run: {} for run in mapping} for (run, tag_to_content) in six.iteritems(mapping): diff --git a/tensorboard/plugins/hparams/backend_context_test.py b/tensorboard/plugins/hparams/backend_context_test.py index 63f03599f6..4b1c00c54a 100644 --- a/tensorboard/plugins/hparams/backend_context_test.py +++ b/tensorboard/plugins/hparams/backend_context_test.py @@ -65,8 +65,10 @@ def setUp(self): self._mock_multiplexer.SummaryMetadata.side_effect = ( self._mock_summary_metadata ) - self._mock_tb_context.data_provider = data_provider.MultiplexerDataProvider( - self._mock_multiplexer, "/path/to/logs" + self._mock_tb_context.data_provider = ( + data_provider.MultiplexerDataProvider( + self._mock_multiplexer, "/path/to/logs" + ) ) self.session_1_start_info_ = "" self.session_2_start_info_ = "" @@ -93,14 +95,32 @@ def _mock_all_summary_metadata(self): } scalars_content = { "exp/session_1": {"loss": b"", "accuracy": b""}, - "exp/session_1/eval": {"loss": b"",}, - "exp/session_1/train": {"loss": b"",}, - "exp/session_2": {"loss": b"", "accuracy": b"",}, - "exp/session_2/eval": {"loss": b"",}, - "exp/session_2/train": {"loss": b"",}, - "exp/session_3": {"loss": 
b"", "accuracy": b"",}, - "exp/session_3/eval": {"loss": b"",}, - "exp/session_3xyz/": {"loss2": b"",}, + "exp/session_1/eval": { + "loss": b"", + }, + "exp/session_1/train": { + "loss": b"", + }, + "exp/session_2": { + "loss": b"", + "accuracy": b"", + }, + "exp/session_2/eval": { + "loss": b"", + }, + "exp/session_2/train": { + "loss": b"", + }, + "exp/session_3": { + "loss": b"", + "accuracy": b"", + }, + "exp/session_3/eval": { + "loss": b"", + }, + "exp/session_3xyz/": { + "loss2": b"", + }, } for (run, tag_to_content) in hparams_content.items(): result.setdefault(run, {}) @@ -340,7 +360,9 @@ def test_experiment_without_experiment_tag_many_distinct_values(self): ) request_ctx = context.RequestContext() actual_exp = ctxt.experiment_from_metadata( - request_ctx, "123", ctxt.hparams_metadata(request_ctx, "123"), + request_ctx, + "123", + ctxt.hparams_metadata(request_ctx, "123"), ) _canonicalize_experiment(actual_exp) self.assertProtoEquals(expected_exp, actual_exp) diff --git a/tensorboard/plugins/hparams/hparams_demo.py b/tensorboard/plugins/hparams/hparams_demo.py index 1b8b166698..43f4acd45c 100644 --- a/tensorboard/plugins/hparams/hparams_demo.py +++ b/tensorboard/plugins/hparams/hparams_demo.py @@ -60,7 +60,9 @@ "this flag.", ) flags.DEFINE_integer( - "num_epochs", 5, "Number of epochs per trial.", + "num_epochs", + 5, + "Number of epochs per trial.", ) @@ -85,13 +87,25 @@ METRICS = [ hp.Metric( - "epoch_accuracy", group="validation", display_name="accuracy (val.)", + "epoch_accuracy", + group="validation", + display_name="accuracy (val.)", ), - hp.Metric("epoch_loss", group="validation", display_name="loss (val.)",), hp.Metric( - "batch_accuracy", group="train", display_name="accuracy (train)", + "epoch_loss", + group="validation", + display_name="loss (val.)", + ), + hp.Metric( + "batch_accuracy", + group="train", + display_name="accuracy (train)", + ), + hp.Metric( + "batch_loss", + group="train", + display_name="loss (train)", ), - hp.Metric("batch_loss", group="train", display_name="loss (train)",), ] diff --git a/tensorboard/plugins/hparams/hparams_minimal_demo.py b/tensorboard/plugins/hparams/hparams_minimal_demo.py index 6fc8e640ba..652058850c 100644 --- a/tensorboard/plugins/hparams/hparams_minimal_demo.py +++ b/tensorboard/plugins/hparams/hparams_minimal_demo.py @@ -270,9 +270,9 @@ def run_all(logdir, verbose=False): for ambient_temperature in TEMPERATURE_LIST: for material in HEAT_COEFFICIENTS: hparams = { - u"initial_temperature": initial_temperature, - u"ambient_temperature": ambient_temperature, - u"material": material, + "initial_temperature": initial_temperature, + "ambient_temperature": ambient_temperature, + "material": material, } hparam_str = str(hparams) group_name = fingerprint(hparam_str) diff --git a/tensorboard/plugins/hparams/keras_test.py b/tensorboard/plugins/hparams/keras_test.py index 358dc24031..5ace4dc473 100644 --- a/tensorboard/plugins/hparams/keras_test.py +++ b/tensorboard/plugins/hparams/keras_test.py @@ -85,7 +85,8 @@ def mock_time(): self.assertEqual(len(event.summary.value), 1, event.summary.value) value = event.summary.value[0] self.assertEqual( - value.metadata.plugin_data.plugin_name, metadata.PLUGIN_NAME, + value.metadata.plugin_data.plugin_name, + metadata.PLUGIN_NAME, ) plugin_data.append(value.metadata.plugin_data.content) @@ -140,7 +141,8 @@ def mock_time(): def test_explicit_writer(self): writer = tf.compat.v2.summary.create_file_writer( - self.logdir, filename_suffix=".magic", + self.logdir, + filename_suffix=".magic", ) 
self._initialize_model(writer=writer) self.model.fit(x=[(1,)], y=[(2,)], callbacks=[self.callback]) diff --git a/tensorboard/plugins/hparams/list_session_groups_test.py b/tensorboard/plugins/hparams/list_session_groups_test.py index a13a1c9850..a68881b252 100644 --- a/tensorboard/plugins/hparams/list_session_groups_test.py +++ b/tensorboard/plugins/hparams/list_session_groups_test.py @@ -74,8 +74,10 @@ def setUp(self): self._mock_summary_metadata ) self._mock_multiplexer.Tensors.side_effect = self._mock_tensors - self._mock_tb_context.data_provider = data_provider.MultiplexerDataProvider( - self._mock_multiplexer, "/path/to/logs" + self._mock_tb_context.data_provider = ( + data_provider.MultiplexerDataProvider( + self._mock_multiplexer, "/path/to/logs" + ) ) def _mock_all_summary_metadata(self): @@ -272,7 +274,9 @@ def _mock_tensors(self, run, tag): ) ] result_dict = { - "": {metadata.EXPERIMENT_TAG: hparams_time_series[:],}, + "": { + metadata.EXPERIMENT_TAG: hparams_time_series[:], + }, "session_1": { metadata.SESSION_START_INFO_TAG: hparams_time_series[:], metadata.SESSION_END_INFO_TAG: hparams_time_series[:], diff --git a/tensorboard/plugins/hparams/summary.py b/tensorboard/plugins/hparams/summary.py index 489c2fbad5..2789e35ef9 100644 --- a/tensorboard/plugins/hparams/summary.py +++ b/tensorboard/plugins/hparams/summary.py @@ -194,7 +194,9 @@ def _summary(tag, hparams_plugin_data): raw_metadata = tb_metadata.SerializeToString() tf_metadata = tf.compat.v1.SummaryMetadata.FromString(raw_metadata) summary.value.add( - tag=tag, metadata=tf_metadata, tensor=_TF_NULL_TENSOR, + tag=tag, + metadata=tf_metadata, + tensor=_TF_NULL_TENSOR, ) return summary diff --git a/tensorboard/plugins/hparams/summary_v2.py b/tensorboard/plugins/hparams/summary_v2.py index ce2ee37c88..39a9406678 100644 --- a/tensorboard/plugins/hparams/summary_v2.py +++ b/tensorboard/plugins/hparams/summary_v2.py @@ -58,7 +58,9 @@ def hparams(hparams, trial_id=None, start_time_secs=None): was written because no default summary writer was available. """ pb = hparams_pb( - hparams=hparams, trial_id=trial_id, start_time_secs=start_time_secs, + hparams=hparams, + trial_id=trial_id, + start_time_secs=start_time_secs, ) return _write_summary("hparams", pb) @@ -87,7 +89,8 @@ def hparams_pb(hparams, trial_id=None, start_time_secs=None): group_name = _derive_session_group_name(trial_id, hparams) session_start_info = plugin_data_pb2.SessionStartInfo( - group_name=group_name, start_time_secs=start_time_secs, + group_name=group_name, + start_time_secs=start_time_secs, ) for hp_name in sorted(hparams): hp_value = hparams[hp_name] @@ -131,7 +134,9 @@ def hparams_config(hparams, metrics, time_created_secs=None): was written because no default summary writer was available. 
""" pb = hparams_config_pb( - hparams=hparams, metrics=metrics, time_created_secs=time_created_secs, + hparams=hparams, + metrics=metrics, + time_created_secs=time_created_secs, ) return _write_summary("hparams_config", pb) @@ -588,7 +593,10 @@ def __init__( def as_proto(self): return api_pb2.MetricInfo( - name=api_pb2.MetricName(group=self._group, tag=self._tag,), + name=api_pb2.MetricName( + group=self._group, + tag=self._tag, + ), display_name=self._display_name, description=self._description, dataset_type=self._dataset_type, diff --git a/tensorboard/plugins/hparams/summary_v2_test.py b/tensorboard/plugins/hparams/summary_v2_test.py index 86d4dbf40d..a5b0fe0db8 100644 --- a/tensorboard/plugins/hparams/summary_v2_test.py +++ b/tensorboard/plugins/hparams/summary_v2_test.py @@ -102,7 +102,8 @@ def _check_summary(self, summary_pb, check_group_name=False): self.assertEqual(len(values), 1, values) actual_value = values[0] self.assertEqual( - actual_value.metadata.plugin_data.plugin_name, metadata.PLUGIN_NAME, + actual_value.metadata.plugin_data.plugin_name, + metadata.PLUGIN_NAME, ) self.assertEqual( tensor_pb2.TensorProto.FromString( @@ -408,7 +409,8 @@ def _check_summary(self, summary_pb): self.assertEqual(len(values), 1, values) actual_value = values[0] self.assertEqual( - actual_value.metadata.plugin_data.plugin_name, metadata.PLUGIN_NAME, + actual_value.metadata.plugin_data.plugin_name, + metadata.PLUGIN_NAME, ) plugin_content = actual_value.metadata.plugin_data.content self.assertEqual( diff --git a/tensorboard/plugins/image/images_demo.py b/tensorboard/plugins/image/images_demo.py index 8f32f9cd03..6d93f394a8 100644 --- a/tensorboard/plugins/image/images_demo.py +++ b/tensorboard/plugins/image/images_demo.py @@ -264,10 +264,10 @@ def run_sobel(logdir, verbose=False): tf.stack([output_image]), display_name="Sobel edge detection", description=( - u"Demonstration of [Sobel edge detection]. The step " + "Demonstration of [Sobel edge detection]. The step " "parameter adjusts the radius of the kernel. " "The kernel can be of arbitrary size, and considers " - u"nearby pixels with \u2113\u2082-linear falloff.\n\n" + "nearby pixels with \u2113\u2082-linear falloff.\n\n" # (that says ``$\ell_2$-linear falloff'') "Edge detection is done on a per-channel basis, so " "you can observe which edges are “mostly red " diff --git a/tensorboard/plugins/image/images_plugin.py b/tensorboard/plugins/image/images_plugin.py index 8fef2fb43c..8866be5aa8 100644 --- a/tensorboard/plugins/image/images_plugin.py +++ b/tensorboard/plugins/image/images_plugin.py @@ -86,7 +86,9 @@ def frontend_metadata(self): def _index_impl(self, ctx, experiment): mapping = self._data_provider.list_blob_sequences( - ctx, experiment_id=experiment, plugin_name=metadata.PLUGIN_NAME, + ctx, + experiment_id=experiment, + plugin_name=metadata.PLUGIN_NAME, ) result = {run: {} for run in mapping} for (run, tag_to_content) in six.iteritems(mapping): @@ -201,7 +203,12 @@ def _query_for_individual_image(self, run, tag, sample, index): in the given run with the given tag. 
""" query_string = urllib.parse.urlencode( - {"run": run, "tag": tag, "sample": sample, "index": index,} + { + "run": run, + "tag": tag, + "sample": sample, + "index": index, + } ) return query_string diff --git a/tensorboard/plugins/image/images_plugin_test.py b/tensorboard/plugins/image/images_plugin_test.py index c36e95649f..db881a7677 100644 --- a/tensorboard/plugins/image/images_plugin_test.py +++ b/tensorboard/plugins/image/images_plugin_test.py @@ -113,7 +113,10 @@ def _create_data(self): # Start a server with the plugin. multiplexer = event_multiplexer.EventMultiplexer( - {"foo": foo_directory, "bar": bar_directory,} + { + "foo": foo_directory, + "bar": bar_directory, + } ) multiplexer.Reload() return (self.log_dir, multiplexer) diff --git a/tensorboard/plugins/mesh/mesh_plugin_test.py b/tensorboard/plugins/mesh/mesh_plugin_test.py index 3065557659..b3df05ce7d 100644 --- a/tensorboard/plugins/mesh/mesh_plugin_test.py +++ b/tensorboard/plugins/mesh/mesh_plugin_test.py @@ -147,7 +147,9 @@ def setUp(self): # Start a server that will receive requests. self.multiplexer = event_multiplexer.EventMultiplexer( - {"bar": bar_directory,} + { + "bar": bar_directory, + } ) self.context = base_plugin.TBContext( logdir=self.log_dir, multiplexer=self.multiplexer diff --git a/tensorboard/plugins/metrics/metrics_plugin_test.py b/tensorboard/plugins/metrics/metrics_plugin_test.py index ddaf923077..9733f08571 100644 --- a/tensorboard/plugins/metrics/metrics_plugin_test.py +++ b/tensorboard/plugins/metrics/metrics_plugin_test.py @@ -180,7 +180,10 @@ def test_tags_empty(self): self.assertEqual(expected_tags, response["scalars"]) self.assertEqual(expected_tags, response["histograms"]) self.assertEqual( - {"tagDescriptions": {}, "tagRunSampledInfo": {},}, + { + "tagDescriptions": {}, + "tagRunSampledInfo": {}, + }, response["images"], ) @@ -695,7 +698,10 @@ def test_image_bad_request(self): "run": "run1", }, {"plugin": "images", "tag": "images/tagA", "run": "run1"}, - {"plugin": "images", "tag": "images/tagA",}, + { + "plugin": "images", + "tag": "images/tagA", + }, ] response = self._plugin._time_series_impl( context.RequestContext(), "expid", requests diff --git a/tensorboard/plugins/npmi/csv_to_plugin_data_demo.py b/tensorboard/plugins/npmi/csv_to_plugin_data_demo.py index b5439b0876..d5d147ec42 100644 --- a/tensorboard/plugins/npmi/csv_to_plugin_data_demo.py +++ b/tensorboard/plugins/npmi/csv_to_plugin_data_demo.py @@ -31,7 +31,9 @@ FLAGS = flags.FLAGS flags.DEFINE_string( - "csv_path", "", "CSV file to convert to npmi plugin data.", + "csv_path", + "", + "CSV file to convert to npmi plugin data.", ) diff --git a/tensorboard/plugins/npmi/metadata.py b/tensorboard/plugins/npmi/metadata.py index 82f3fb9bff..7d5c5fd4eb 100644 --- a/tensorboard/plugins/npmi/metadata.py +++ b/tensorboard/plugins/npmi/metadata.py @@ -41,7 +41,8 @@ def create_summary_metadata(description): return summary_pb2.SummaryMetadata( summary_description=description, plugin_data=summary_pb2.SummaryMetadata.PluginData( - plugin_name=PLUGIN_NAME, content=content.SerializeToString(), + plugin_name=PLUGIN_NAME, + content=content.SerializeToString(), ), data_class=summary_pb2.DATA_CLASS_TENSOR, ) diff --git a/tensorboard/plugins/npmi/npmi_plugin.py b/tensorboard/plugins/npmi/npmi_plugin.py index 51e5bd5632..9208ae5339 100644 --- a/tensorboard/plugins/npmi/npmi_plugin.py +++ b/tensorboard/plugins/npmi/npmi_plugin.py @@ -34,7 +34,10 @@ def _error_response(request, error_message): return http_util.Respond( - request, {"error": error_message}, 
"application/json", code=400, + request, + {"error": error_message}, + "application/json", + code=400, ) diff --git a/tensorboard/plugins/npmi/npmi_plugin_test.py b/tensorboard/plugins/npmi/npmi_plugin_test.py index db597cf6e5..b76db63473 100644 --- a/tensorboard/plugins/npmi/npmi_plugin_test.py +++ b/tensorboard/plugins/npmi/npmi_plugin_test.py @@ -137,7 +137,8 @@ def testTags(self): def testAnnotations(self): plugin = self.create_plugin() annotations = plugin.annotations_impl( - context.RequestContext(), experiment="exp", + context.RequestContext(), + experiment="exp", ) self.assertItemsEqual(["name_1", "name_2"], annotations["run_1"]) self.assertItemsEqual(["name_1", "name_2"], annotations["run_2"]) @@ -145,7 +146,8 @@ def testAnnotations(self): def testMetrics(self): plugin = self.create_plugin() metrics = plugin.metrics_impl( - context.RequestContext(), experiment="exp", + context.RequestContext(), + experiment="exp", ) self.assertItemsEqual(["A", "B"], metrics["run_1"]) self.assertItemsEqual(["A", "B"], metrics["run_2"]) diff --git a/tensorboard/plugins/npmi/npy_to_embedding_data_demo.py b/tensorboard/plugins/npmi/npy_to_embedding_data_demo.py index e5b0ddf44c..bf495c3edd 100644 --- a/tensorboard/plugins/npmi/npy_to_embedding_data_demo.py +++ b/tensorboard/plugins/npmi/npy_to_embedding_data_demo.py @@ -31,7 +31,9 @@ FLAGS = flags.FLAGS flags.DEFINE_string( - "out_path", None, "Directory to write the new log file to.", + "out_path", + None, + "Directory to write the new log file to.", ) flags.DEFINE_string( "embeddings_path", diff --git a/tensorboard/plugins/npmi/summary.py b/tensorboard/plugins/npmi/summary.py index 5190d17d60..caad1f6e8b 100644 --- a/tensorboard/plugins/npmi/summary.py +++ b/tensorboard/plugins/npmi/summary.py @@ -42,7 +42,9 @@ def npmi_metrics(tensor, step=None, description=None): `tf.summary.experimental.get_step()` is None. """ with tf.summary.experimental.summary_scope( - metadata.METRICS_TAG, "", values=[tensor, step], + metadata.METRICS_TAG, + "", + values=[tensor, step], ) as (tag, _): return tf.summary.write( tag=tag, @@ -72,7 +74,9 @@ def npmi_annotations(tensor, step=None, description=None): `tf.summary.experimental.get_step()` is None. """ with tf.summary.experimental.summary_scope( - metadata.ANNOTATIONS_TAG, "", values=[tensor, step], + metadata.ANNOTATIONS_TAG, + "", + values=[tensor, step], ) as (tag, _): return tf.summary.write( tag=tag, @@ -103,7 +107,9 @@ def npmi_values(tensor, step=None, description=None): `tf.summary.experimental.get_step()` is None. """ with tf.summary.experimental.summary_scope( - metadata.VALUES_TAG, "", values=[tensor, step], + metadata.VALUES_TAG, + "", + values=[tensor, step], ) as (tag, _): return tf.summary.write( tag=tag, @@ -134,7 +140,9 @@ def npmi_embeddings(tensor, step=None, description=None): `tf.summary.experimental.get_step()` is None. 
""" with tf.summary.experimental.summary_scope( - metadata.EMBEDDINGS_TAG, "", values=[tensor, step], + metadata.EMBEDDINGS_TAG, + "", + values=[tensor, step], ) as (tag, _): return tf.summary.write( tag=tag, diff --git a/tensorboard/plugins/pr_curve/pr_curves_plugin.py b/tensorboard/plugins/pr_curve/pr_curves_plugin.py index 27316c7d0e..bf435511ee 100644 --- a/tensorboard/plugins/pr_curve/pr_curves_plugin.py +++ b/tensorboard/plugins/pr_curve/pr_curves_plugin.py @@ -173,7 +173,8 @@ def is_active(self): def frontend_metadata(self): return base_plugin.FrontendMetadata( - element_name="tf-pr-curve-dashboard", tab_name="PR Curves", + element_name="tf-pr-curve-dashboard", + tab_name="PR Curves", ) def _process_datum(self, datum): diff --git a/tensorboard/plugins/profile_redirect/profile_redirect_plugin.py b/tensorboard/plugins/profile_redirect/profile_redirect_plugin.py index 0b94084f9e..161ae428b2 100644 --- a/tensorboard/plugins/profile_redirect/profile_redirect_plugin.py +++ b/tensorboard/plugins/profile_redirect/profile_redirect_plugin.py @@ -48,5 +48,6 @@ def is_active(self): def frontend_metadata(self): return base_plugin.FrontendMetadata( - element_name="tf-profile-redirect-dashboard", tab_name="Profile", + element_name="tf-profile-redirect-dashboard", + tab_name="Profile", ) diff --git a/tensorboard/plugins/projector/projector_api_test.py b/tensorboard/plugins/projector/projector_api_test.py index e05f99b915..e9d2e1c565 100644 --- a/tensorboard/plugins/projector/projector_api_test.py +++ b/tensorboard/plugins/projector/projector_api_test.py @@ -34,7 +34,8 @@ def create_dummy_config(): model_checkpoint_path="test", embeddings=[ projector.EmbeddingInfo( - tensor_name="tensor1", metadata_path="metadata1", + tensor_name="tensor1", + metadata_path="metadata1", ), ], ) diff --git a/tensorboard/plugins/projector/projector_plugin.py b/tensorboard/plugins/projector/projector_plugin.py index 923b342aeb..785533abea 100644 --- a/tensorboard/plugins/projector/projector_plugin.py +++ b/tensorboard/plugins/projector/projector_plugin.py @@ -273,7 +273,8 @@ def get_plugin_apps(self): BOOKMARKS_ROUTE: self._serve_bookmarks, SPRITE_IMAGE_ROUTE: self._serve_sprite_image, "/index.js": functools.partial( - self._serve_file, os.path.join(asset_prefix, "index.js"), + self._serve_file, + os.path.join(asset_prefix, "index.js"), ), "/projector_binary.html": functools.partial( self._serve_file, @@ -320,7 +321,8 @@ def is_active(self): def frontend_metadata(self): return base_plugin.FrontendMetadata( - es_module_path="/index.js", disable_reload=True, + es_module_path="/index.js", + disable_reload=True, ) def _determine_is_active(self): diff --git a/tensorboard/plugins/scalar/scalars_plugin.py b/tensorboard/plugins/scalar/scalars_plugin.py index 6c66d0e6d1..adb3789d2b 100644 --- a/tensorboard/plugins/scalar/scalars_plugin.py +++ b/tensorboard/plugins/scalar/scalars_plugin.py @@ -79,7 +79,9 @@ def index_impl(self, ctx, experiment=None): """Return {runName: {tagName: {displayName: ..., description: ...}}}.""" mapping = self._data_provider.list_scalars( - ctx, experiment_id=experiment, plugin_name=metadata.PLUGIN_NAME, + ctx, + experiment_id=experiment, + plugin_name=metadata.PLUGIN_NAME, ) result = {run: {} for run in mapping} for (run, tag_to_content) in six.iteritems(mapping): diff --git a/tensorboard/plugins/scalar/scalars_plugin_test.py b/tensorboard/plugins/scalar/scalars_plugin_test.py index 88bb876240..1cb6f94730 100644 --- a/tensorboard/plugins/scalar/scalars_plugin_test.py +++ 
b/tensorboard/plugins/scalar/scalars_plugin_test.py @@ -76,7 +76,10 @@ def load_plugin(self, run_names): multiplexer.Reload() provider = data_provider.MultiplexerDataProvider(multiplexer, logdir) - ctx = base_plugin.TBContext(logdir=logdir, data_provider=provider,) + ctx = base_plugin.TBContext( + logdir=logdir, + data_provider=provider, + ) return scalars_plugin.ScalarsPlugin(ctx) def load_server(self, run_names): @@ -92,7 +95,8 @@ def generate_run(self, logdir, run_name): data = [1 + step, 2 + step, 3 + step] if run_name == self._RUN_WITH_LEGACY_SCALARS: summ = tf.compat.v1.summary.scalar( - self._LEGACY_SCALAR_TAG, tf.reduce_mean(data), + self._LEGACY_SCALAR_TAG, + tf.reduce_mean(data), ).numpy() elif run_name == self._RUN_WITH_SCALARS: summ = summary.op( @@ -303,7 +307,10 @@ def test_scalars_multirun_bad_method(self): "/data/plugin/scalars/scalars_multirun", query_string={ "tag": "%s/scalar_summary" % self._SCALAR_TAG, - "runs": [self._RUN_WITH_SCALARS, self._RUN_WITH_SCALARS_3,], + "runs": [ + self._RUN_WITH_SCALARS, + self._RUN_WITH_SCALARS_3, + ], }, ) self.assertEqual(405, response.status_code) @@ -337,7 +344,10 @@ def test_download_url_json(self): server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse) response = server.get( "/data/plugin/scalars/scalars?run=%s&tag=%s" - % (self._RUN_WITH_SCALARS, "%s/scalar_summary" % self._SCALAR_TAG,) + % ( + self._RUN_WITH_SCALARS, + "%s/scalar_summary" % self._SCALAR_TAG, + ) ) self.assertEqual(200, response.status_code) self.assertEqual("application/json", response.headers["Content-Type"]) @@ -350,7 +360,10 @@ def test_download_url_csv(self): server = werkzeug_test.Client(wsgi_app, wrappers.BaseResponse) response = server.get( "/data/plugin/scalars/scalars?run=%s&tag=%s&format=csv" - % (self._RUN_WITH_SCALARS, "%s/scalar_summary" % self._SCALAR_TAG,) + % ( + self._RUN_WITH_SCALARS, + "%s/scalar_summary" % self._SCALAR_TAG, + ) ) self.assertEqual(200, response.status_code) self.assertEqual( diff --git a/tensorboard/plugins/text/summary_test.py b/tensorboard/plugins/text/summary_test.py index ee9d5d4eb4..a440cf487a 100644 --- a/tensorboard/plugins/text/summary_test.py +++ b/tensorboard/plugins/text/summary_test.py @@ -81,7 +81,7 @@ def test_bytes_value(self): self.assertEqual(b"A name\xe2\x80\xa6I call myself", value) def test_unicode_value(self): - pb = self.text("mi", u"A name\u2026I call myself") + pb = self.text("mi", "A name\u2026I call myself") value = tensor_util.make_ndarray(pb.value[0].tensor).item() self.assertIsInstance(value, six.binary_type) self.assertEqual(b"A name\xe2\x80\xa6I call myself", value) @@ -106,9 +106,7 @@ def test_np_array_bytes_value(self): def test_np_array_unicode_value(self): pb = self.text( "fa", - np.array( - [[u"A", u"long", u"long"], [u"way", u"to", u"run \u203C"]] - ), + np.array([["A", "long", "long"], ["way", "to", "run \u203C"]]), ) values = tensor_util.make_ndarray(pb.value[0].tensor).tolist() self.assertEqual( diff --git a/tensorboard/plugins/text/text_demo.py b/tensorboard/plugins/text/text_demo.py index 463ad8bd99..d69a1ca59f 100644 --- a/tensorboard/plugins/text/text_demo.py +++ b/tensorboard/plugins/text/text_demo.py @@ -96,7 +96,7 @@ def higher_order_tensors(step): bold_numbers = tf.strings.join(["**", tf.as_string(numbers), "**"]) bold_row = tf.expand_dims(bold_numbers, 0) bold_column = tf.expand_dims(bold_numbers, 1) - corner_cell = tf.constant(u"\u00d7".encode("utf-8")) # MULTIPLICATION SIGN + corner_cell = tf.constant("\u00d7".encode("utf-8")) # MULTIPLICATION SIGN # Now, we have to put 
diff --git a/tensorboard/plugins/text/text_plugin.py b/tensorboard/plugins/text/text_plugin.py
index 8ac70ccf35..be8e582671 100644
--- a/tensorboard/plugins/text/text_plugin.py
+++ b/tensorboard/plugins/text/text_plugin.py
@@ -222,7 +222,9 @@ def frontend_metadata(self):
 
     def index_impl(self, ctx, experiment):
         mapping = self._data_provider.list_tensors(
-            ctx, experiment_id=experiment, plugin_name=metadata.PLUGIN_NAME,
+            ctx,
+            experiment_id=experiment,
+            plugin_name=metadata.PLUGIN_NAME,
         )
         return {
             run: list(tag_to_content)
diff --git a/tensorboard/plugins/text_v2/text_v2_plugin.py b/tensorboard/plugins/text_v2/text_v2_plugin.py
index 0296a3e5d5..f7f060122e 100644
--- a/tensorboard/plugins/text_v2/text_v2_plugin.py
+++ b/tensorboard/plugins/text_v2/text_v2_plugin.py
@@ -143,7 +143,9 @@ def is_active(self):
 
     def index_impl(self, ctx, experiment):
         mapping = self._data_provider.list_tensors(
-            ctx, experiment_id=experiment, plugin_name=metadata.PLUGIN_NAME,
+            ctx,
+            experiment_id=experiment,
+            plugin_name=metadata.PLUGIN_NAME,
         )
         return {
             run: list(tag_to_content)
diff --git a/tensorboard/plugins/text_v2/text_v2_plugin_test.py b/tensorboard/plugins/text_v2/text_v2_plugin_test.py
index a83dc0a33d..72146dd930 100644
--- a/tensorboard/plugins/text_v2/text_v2_plugin_test.py
+++ b/tensorboard/plugins/text_v2/text_v2_plugin_test.py
@@ -59,7 +59,9 @@ def create_plugin(self, generate_testdata=True, include_text=True):
             )
 
         ctx = base_plugin.TBContext(
-            logdir=self.logdir, multiplexer=multiplexer, data_provider=provider,
+            logdir=self.logdir,
+            multiplexer=multiplexer,
+            data_provider=provider,
         )
         return text_v2_plugin.TextV2Plugin(ctx)
diff --git a/tensorboard/scripts/generate_testdata.py b/tensorboard/scripts/generate_testdata.py
index d6f5d3f592..c7bbf75eaf 100644
--- a/tensorboard/scripts/generate_testdata.py
+++ b/tensorboard/scripts/generate_testdata.py
@@ -33,7 +33,9 @@
 
 flags.DEFINE_string(
-    "target", None, """The directory where serialized data will be written""",
+    "target",
+    None,
+    """The directory where serialized data will be written""",
 )
 
 flags.DEFINE_boolean(
diff --git a/tensorboard/summary/summary_test.py b/tensorboard/summary/summary_test.py
index 58c27e9ed5..87ba4d7cfa 100644
--- a/tensorboard/summary/summary_test.py
+++ b/tensorboard/summary/summary_test.py
@@ -76,7 +76,15 @@ def test_all_exports_correspond_to_plugins(self):
 class SummaryExportsTest(SummaryExportsBaseTest, unittest.TestCase):
     module = tb_summary
     allowed = frozenset(("v1", "v2"))
-    plugins = frozenset(["audio", "histogram", "image", "scalar", "text",])
+    plugins = frozenset(
+        [
+            "audio",
+            "histogram",
+            "image",
+            "scalar",
+            "text",
+        ]
+    )
 
     def test_plugins_export_pb_functions(self):
         self.skipTest("V2 summary API _pb functions are not finalized yet")
@@ -99,7 +107,15 @@ class SummaryExportsV1Test(SummaryExportsBaseTest, unittest.TestCase):
 
 class SummaryExportsV2Test(SummaryExportsBaseTest, unittest.TestCase):
     module = tb_summary_v2
-    plugins = frozenset(["audio", "histogram", "image", "scalar", "text",])
+    plugins = frozenset(
+        [
+            "audio",
+            "histogram",
+            "image",
+            "scalar",
+            "text",
+        ]
+    )
 
     def test_plugins_export_pb_functions(self):
         self.skipTest("V2 summary API _pb functions are not finalized yet")
diff --git a/tensorboard/tools/diagnose_tensorboard.py b/tensorboard/tools/diagnose_tensorboard.py
index 724b56cee5..ad749c1795 100644
--- a/tensorboard/tools/diagnose_tensorboard.py
+++ b/tensorboard/tools/diagnose_tensorboard.py
@@ -161,10 +161,12 @@ def general():
     logging.info("os.name: %s", os.name)
     na = type("N/A", (object,), {"__repr__": lambda self: "N/A"})
     logging.info(
-        "os.uname(): %r", getattr(os, "uname", na)(),
+        "os.uname(): %r",
+        getattr(os, "uname", na)(),
     )
     logging.info(
-        "sys.getwindowsversion(): %r", getattr(sys, "getwindowsversion", na)(),
+        "sys.getwindowsversion(): %r",
+        getattr(sys, "getwindowsversion", na)(),
     )
 
 
@@ -178,28 +180,34 @@ def package_management():
 
 @check
 def installed_packages():
     freeze = pip(["freeze", "--all"]).decode("utf-8").splitlines()
-    packages = {line.split(u"==")[0]: line for line in freeze}
+    packages = {line.split("==")[0]: line for line in freeze}
     packages_set = frozenset(packages)
 
     # For each of the following families, expect exactly one package to be
     # installed.
     expect_unique = [
-        frozenset([u"tensorboard", u"tb-nightly", u"tensorflow-tensorboard",]),
         frozenset(
             [
-                u"tensorflow",
-                u"tensorflow-gpu",
-                u"tf-nightly",
-                u"tf-nightly-2.0-preview",
-                u"tf-nightly-gpu",
-                u"tf-nightly-gpu-2.0-preview",
+                "tensorboard",
+                "tb-nightly",
+                "tensorflow-tensorboard",
             ]
         ),
         frozenset(
             [
-                u"tensorflow-estimator",
-                u"tensorflow-estimator-2.0-preview",
-                u"tf-estimator-nightly",
+                "tensorflow",
+                "tensorflow-gpu",
+                "tf-nightly",
+                "tf-nightly-2.0-preview",
+                "tf-nightly-gpu",
+                "tf-nightly-gpu-2.0-preview",
+            ]
+        ),
+        frozenset(
+            [
+                "tensorflow-estimator",
+                "tensorflow-estimator-2.0-preview",
+                "tf-estimator-nightly",
             ]
         ),
     ]
diff --git a/tensorboard/uploader/auth.py b/tensorboard/uploader/auth.py
index 1a1210c7a5..f1b40e3ca5 100644
--- a/tensorboard/uploader/auth.py
+++ b/tensorboard/uploader/auth.py
@@ -48,7 +48,7 @@
 # The client "secret" is public by design for installed apps. See
 # https://developers.google.com/identity/protocols/OAuth2?csw=1#installed
-OAUTH_CLIENT_CONFIG = u"""
+OAUTH_CLIENT_CONFIG = """
 {
     "installed": {
         "client_id": "373649185512-8v619h5kft38l4456nm2dj4ubeqsrvh6.apps.googleusercontent.com",
@@ -108,8 +108,10 @@ def read_credentials(self):
         if self._credentials_filepath is None:
             return None
         if os.path.exists(self._credentials_filepath):
-            return google.oauth2.credentials.Credentials.from_authorized_user_file(
-                self._credentials_filepath
+            return (
+                google.oauth2.credentials.Credentials.from_authorized_user_file(
+                    self._credentials_filepath
+                )
             )
         return None
diff --git a/tensorboard/uploader/exporter.py b/tensorboard/uploader/exporter.py
index 925202d8cb..66eacf15ba 100644
--- a/tensorboard/uploader/exporter.py
+++ b/tensorboard/uploader/exporter.py
@@ -141,7 +141,10 @@ def export(self, read_time=None):
         if read_time is None:
             read_time = time.time()
         experiment_metadata_mask = experiment_pb2.ExperimentMask(
-            create_time=True, update_time=True, name=True, description=True,
+            create_time=True,
+            update_time=True,
+            name=True,
+            description=True,
         )
         experiments = list_experiments(
             self._api, fieldmask=experiment_metadata_mask, read_time=read_time
@@ -230,23 +233,23 @@ def _request_json_data(self, experiment_id, read_time):
                 response.tag_metadata.SerializeToString()
             ).decode("ascii")
             json_data = {
-                u"run": response.run_name,
-                u"tag": response.tag_name,
-                u"summary_metadata": metadata,
+                "run": response.run_name,
+                "tag": response.tag_name,
+                "summary_metadata": metadata,
             }
             filename = None
             if response.HasField("points"):
-                json_data[u"points"] = self._process_scalar_points(
+                json_data["points"] = self._process_scalar_points(
                     response.points
                 )
                 filename = _FILENAME_SCALARS
             elif response.HasField("tensors"):
-                json_data[u"points"] = self._process_tensor_points(
+                json_data["points"] = self._process_tensor_points(
                     response.tensors, experiment_id
                 )
                 filename = _FILENAME_TENSORS
             elif response.HasField("blob_sequences"):
-                json_data[u"points"] = self._process_blob_sequence_points(
+                json_data["points"] = self._process_blob_sequence_points(
                     response.blob_sequences, experiment_id
                 )
                 filename = _FILENAME_BLOB_SEQUENCES
@@ -266,9 +269,9 @@ def _process_scalar_points(self, points):
         """
         wall_times = [t.ToNanoseconds() / 1e9 for t in points.wall_times]
         return {
-            u"steps": list(points.steps),
-            u"wall_times": wall_times,
-            u"values": list(points.values),
+            "steps": list(points.steps),
+            "wall_times": wall_times,
+            "values": list(points.values),
         }
 
     def _process_tensor_points(self, points, experiment_id):
@@ -286,9 +289,9 @@ def _process_tensor_points(self, points, experiment_id):
         """
         wall_times = [t.ToNanoseconds() / 1e9 for t in points.wall_times]
         json_object = {
-            u"steps": list(points.steps),
-            u"wall_times": wall_times,
-            u"tensors_file_path": None,
+            "steps": list(points.steps),
+            "wall_times": wall_times,
+            "tensors_file_path": None,
         }
         if not json_object["steps"]:
             return json_object
@@ -306,7 +309,7 @@ def _process_tensor_points(self, points, experiment_id):
         return json_object
 
     def _fix_string_types(self, ndarray):
-        """ Change the dtype of text arrays to String rather than Object.
+        """Change the dtype of text arrays to String rather than Object.
 
         np.savez ends up pickling np.object arrays, while it doesn't pickle
         strings. The downside is that it needs to pad the length of each string
@@ -367,11 +370,11 @@ def _process_blob_sequence_points(self, blob_sequences, experiment_id):
             t.ToNanoseconds() / 1e9 for t in blob_sequences.wall_times
         ]
         json_object = {
-            u"steps": list(blob_sequences.steps),
-            u"wall_times": wall_times,
-            u"blob_file_paths": [],
+            "steps": list(blob_sequences.steps),
+            "wall_times": wall_times,
+            "blob_file_paths": [],
         }
-        blob_file_paths = json_object[u"blob_file_paths"]
+        blob_file_paths = json_object["blob_file_paths"]
         for blobseq in blob_sequences.values:
             seq_blob_file_paths = []
             for entry in blobseq.entries:
diff --git a/tensorboard/uploader/exporter_test.py b/tensorboard/uploader/exporter_test.py
index ea43f88946..74e9fc22eb 100644
--- a/tensorboard/uploader/exporter_test.py
+++ b/tensorboard/uploader/exporter_test.py
@@ -532,10 +532,14 @@ def stream_experiment_data(request, **kwargs):
             iter(
                 [
                     export_service_pb2.StreamBlobDataResponse(
-                        data=b"4321", offset=0, final_chunk=False,
+                        data=b"4321",
+                        offset=0,
+                        final_chunk=False,
                     ),
                     export_service_pb2.StreamBlobDataResponse(
-                        data=b"8765", offset=4, final_chunk=True,
+                        data=b"8765",
+                        offset=4,
+                        final_chunk=True,
                     ),
                 ]
             ),
@@ -702,7 +706,7 @@ def stream_experiments(request, **kwargs):
         msg = str(cm.exception)
         self.assertIn("Unexpected characters", msg)
-        self.assertIn(repr(sorted([u".", u"/"])), msg)
+        self.assertIn(repr(sorted([".", "/"])), msg)
         self.assertIn("../authorized_keys", msg)
         mock_api_client.StreamExperimentData.assert_not_called()
diff --git a/tensorboard/uploader/formatters.py b/tensorboard/uploader/formatters.py
index 116a13ab4f..08db39438b 100644
--- a/tensorboard/uploader/formatters.py
+++ b/tensorboard/uploader/formatters.py
@@ -71,7 +71,11 @@ def format_experiment(self, experiment, experiment_url):
         ]
         for name, value in data:
             output.append(
-                "\t%s %s" % (name.ljust(self._NAME_COLUMN_WIDTH), value,)
+                "\t%s %s"
+                % (
+                    name.ljust(self._NAME_COLUMN_WIDTH),
+                    value,
+                )
             )
         return "\n".join(output)
 
@@ -99,5 +103,6 @@ def format_experiment(self, experiment, experiment_url):
             ("binary_object_bytes", experiment.total_blob_bytes),
         ]
         return json.dumps(
-            collections.OrderedDict(data), indent=self._JSON_INDENT,
+            collections.OrderedDict(data),
+            indent=self._JSON_INDENT,
         )
diff --git a/tensorboard/uploader/upload_tracker_test.py b/tensorboard/uploader/upload_tracker_test.py
index ded0042be0..af0a817532 100644
--- a/tensorboard/uploader/upload_tracker_test.py
+++ b/tensorboard/uploader/upload_tracker_test.py
@@ -125,7 +125,8 @@ def testUploadedSummaryWithTensorsAndBlobs(self):
             "1234 scalars, 40 tensors (200 B), 1 binary objects (1000 B)",
         )
         self.assertEqual(
-            skipped_summary, "10 tensors (1.8 kB), 1 binary objects (2.0 kB)",
+            skipped_summary,
+            "10 tensors (1.8 kB), 1 binary objects (2.0 kB)",
         )
         self.assertEqual(stats.has_new_data_since_last_summarize(), False)
 
@@ -135,7 +136,8 @@ def testSummarizeeWithoutTensorsOrBlobs(self):
         self.assertEqual(stats.has_new_data_since_last_summarize(), True)
         (uploaded_summary, skipped_summary) = stats.summarize()
         self.assertEqual(
-            uploaded_summary, "1234 scalars, 0 tensors, 0 binary objects",
+            uploaded_summary,
+            "1234 scalars, 0 tensors, 0 binary objects",
         )
         self.assertIsNone(skipped_summary)
         self.assertEqual(stats.has_new_data_since_last_summarize(), False)
@@ -245,7 +247,8 @@ def testSendTracker(self):
         self.assertEqual(self.mock_write.call_count, 2)
         self.assertEqual(self.mock_flush.call_count, 2)
         self.assertIn(
-            "Data upload starting...", self.mock_write.call_args[0][0],
+            "Data upload starting...",
+            self.mock_write.call_args[0][0],
         )
         self.assertEqual(self.mock_write.call_count, 3)
         self.assertEqual(self.mock_flush.call_count, 3)
@@ -269,7 +272,8 @@ def testScalarsTracker(self):
         self.assertEqual(self.mock_write.call_count, 1)
         self.assertEqual(self.mock_flush.call_count, 1)
         self.assertIn(
-            "Uploading 123 scalars...", self.mock_write.call_args[0][0],
+            "Uploading 123 scalars...",
+            self.mock_write.call_args[0][0],
         )
         self.assertEqual(self.mock_write.call_count, 1)
         self.assertEqual(self.mock_flush.call_count, 1)
@@ -352,7 +356,8 @@ def testBlobTrackerNotUploaded(self):
         self.assertEqual(self.mock_write.call_count, 2)
         self.assertEqual(self.mock_flush.call_count, 2)
         self.assertIn(
-            "Started scanning", self.mock_write.call_args_list[0][0][0],
+            "Started scanning",
+            self.mock_write.call_args_list[0][0][0],
         )
         with tracker.blob_tracker(
             blob_bytes=2048 * 1024 * 1024
diff --git a/tensorboard/uploader/uploader.py b/tensorboard/uploader/uploader.py
index 08fdabdddf..30a40e8c8e 100644
--- a/tensorboard/uploader/uploader.py
+++ b/tensorboard/uploader/uploader.py
@@ -484,7 +484,12 @@ class _ScalarBatchedRequestSender(object):
     """
 
     def __init__(
-        self, experiment_id, api, rpc_rate_limiter, max_request_size, tracker,
+        self,
+        experiment_id,
+        api,
+        rpc_rate_limiter,
+        max_request_size,
+        tracker,
     ):
         if experiment_id is None:
             raise ValueError("experiment_id cannot be None")
@@ -827,18 +832,18 @@ def _validate_tensor_value(self, tensor_proto, tag, step, wall_time):
 class _ByteBudgetManager(object):
     """Helper class for managing the request byte budget for certain RPCs.
 
-      This should be used for RPCs that organize data by Runs, Tags, and Points,
-      specifically WriteScalar and WriteTensor.
+    This should be used for RPCs that organize data by Runs, Tags, and Points,
+    specifically WriteScalar and WriteTensor.
 
-      Any call to add_run(), add_tag(), or add_point() may raise an
-      _OutOfSpaceError, which is non-fatal. It signals to the caller that they
-      should flush the current request and begin a new one.
+    Any call to add_run(), add_tag(), or add_point() may raise an
+    _OutOfSpaceError, which is non-fatal. It signals to the caller that they
+    should flush the current request and begin a new one.
 
-      For more information on the protocol buffer encoding and how byte cost
-      can be calculated, visit:
+    For more information on the protocol buffer encoding and how byte cost
+    can be calculated, visit:
 
-      https://developers.google.com/protocol-buffers/docs/encoding
-      """
+    https://developers.google.com/protocol-buffers/docs/encoding
+    """
 
     def __init__(self, max_bytes):
         # The remaining number of bytes that we may yet add to the request.
@@ -848,13 +853,13 @@ def __init__(self, max_bytes):
     def reset(self, base_request):
         """Resets the byte budget and calculates the cost of the base request.
 
-          Args:
-            base_request: Base request.
+        Args:
+          base_request: Base request.
 
-          Raises:
-            _OutOfSpaceError: If the size of the request exceeds the entire
-              request byte budget.
-          """
+        Raises:
+          _OutOfSpaceError: If the size of the request exceeds the entire
+            request byte budget.
+        """
         self._byte_budget = self._max_bytes
         self._byte_budget -= base_request.ByteSize()
         if self._byte_budget < 0:
@@ -863,13 +868,13 @@ def add_run(self, run_proto):
         """Integrates the cost of a run proto into the byte budget.
 
-          Args:
-            run_proto: The proto representing a run.
+        Args:
+          run_proto: The proto representing a run.
 
-          Raises:
-            _OutOfSpaceError: If adding the run would exceed the remaining request
-              budget.
-          """
+        Raises:
+          _OutOfSpaceError: If adding the run would exceed the remaining request
+            budget.
+        """
         cost = (
             # The size of the run proto without any tag fields set.
             run_proto.ByteSize()
@@ -889,13 +894,13 @@ def add_tag(self, tag_proto):
         """Integrates the cost of a tag proto into the byte budget.
 
-          Args:
-            tag_proto: The proto representing a tag.
+        Args:
+          tag_proto: The proto representing a tag.
 
-          Raises:
-            _OutOfSpaceError: If adding the tag would exceed the remaining request
-              budget.
-          """
+        Raises:
+          _OutOfSpaceError: If adding the tag would exceed the remaining request
+            budget.
+        """
         cost = (
             # The size of the tag proto without any tag fields set.
             tag_proto.ByteSize()
@@ -915,13 +920,13 @@ def add_point(self, point_proto):
         """Integrates the cost of a point proto into the byte budget.
 
-          Args:
-            point_proto: The proto representing a point.
+        Args:
+          point_proto: The proto representing a point.
 
-          Raises:
-            _OutOfSpaceError: If adding the point would exceed the remaining request
-              budget.
-          """
+        Raises:
+          _OutOfSpaceError: If adding the point would exceed the remaining request
+            budget.
+        """
         submessage_cost = point_proto.ByteSize()
         cost = (
             # The size of the point proto.
@@ -980,7 +985,11 @@ def _new_request(self):
         self._metadata = None
 
     def add_event(
-        self, run_name, event, value, metadata,
+        self,
+        run_name,
+        event,
+        value,
+        metadata,
     ):
         """Attempts to add the given event to the current request.
 
@@ -1014,8 +1023,7 @@ def add_event(
             self._new_request()
 
     def flush(self):
-        """Sends the current blob sequence fully, and clears it to make way for the next.
-        """
+        """Sends the current blob sequence fully, and clears it to make way for the next."""
         if self._value:
             blob_sequence_id = self._get_or_create_blob_sequence()
             logger.info(
@@ -1212,7 +1220,8 @@ def _filtered_graph_bytes(graph_bytes):
     # a combination of mysterious circumstances.
     except (message.DecodeError, RuntimeWarning):
         logger.warning(
-            "Could not parse GraphDef of size %d. Skipping.", len(graph_bytes),
+            "Could not parse GraphDef of size %d. Skipping.",
+            len(graph_bytes),
        )
         return None
     # Use the default filter parameters:
diff --git a/tensorboard/uploader/uploader_subcommand.py b/tensorboard/uploader/uploader_subcommand.py
index ca5da7958d..02179f6bfc 100644
--- a/tensorboard/uploader/uploader_subcommand.py
+++ b/tensorboard/uploader/uploader_subcommand.py
@@ -43,7 +43,7 @@
 from tensorboard.plugins import base_plugin
 
 
-_MESSAGE_TOS = u"""\
+_MESSAGE_TOS = """\
 Your use of this service is subject to Google's Terms of Service
 and Privacy Policy , and TensorBoard.dev's Terms of Service
@@ -193,7 +193,7 @@ class _DeleteExperimentIntent(_Intent):
     """The user intends to delete an experiment."""
 
     _MESSAGE_TEMPLATE = textwrap.dedent(
-        u"""\
+        """\
         This will delete the experiment on https://tensorboard.dev with
         the following experiment ID:
@@ -241,7 +241,7 @@ class _UpdateMetadataIntent(_Intent):
     """The user intends to update the metadata for an experiment."""
 
     _MESSAGE_TEMPLATE = textwrap.dedent(
-        u"""\
+        """\
         This will modify the metadata associated with the experiment on
         https://tensorboard.dev with the following experiment ID:
@@ -304,7 +304,7 @@ class _ListIntent(_Intent):
    """The user intends to list all their experiments."""
 
     _MESSAGE = textwrap.dedent(
-        u"""\
+        """\
         This will list all experiments that you've uploaded to
         https://tensorboard.dev. TensorBoard.dev experiments are visible
         to everyone. Do not upload sensitive data.
@@ -380,7 +380,7 @@ class UploadIntent(_Intent):
     """The user intends to upload an experiment from the given logdir."""
 
     _MESSAGE_TEMPLATE = textwrap.dedent(
-        u"""\
+        """\
         This will upload your TensorBoard logs to https://tensorboard.dev/ from
         the following directory:
@@ -492,7 +492,7 @@ class _ExportIntent(_Intent):
     """The user intends to download all their experiment data."""
 
     _MESSAGE_TEMPLATE = textwrap.dedent(
-        u"""\
+        """\
         This will download all your experiment data from https://tensorboard.dev
         and save it to the following directory:
diff --git a/tensorboard/uploader/uploader_subcommand_test.py b/tensorboard/uploader/uploader_subcommand_test.py
index 075fff69f9..e85d2ccbdd 100644
--- a/tensorboard/uploader/uploader_subcommand_test.py
+++ b/tensorboard/uploader/uploader_subcommand_test.py
@@ -67,7 +67,9 @@ def testUploadIntentOneShotEmptyDirectoryFails(self):
         mock_uploader = mock.MagicMock()
         mock_stdout_write = mock.MagicMock()
         with mock.patch.object(
-            uploader_lib, "TensorBoardUploader", return_value=mock_uploader,
+            uploader_lib,
+            "TensorBoardUploader",
+            return_value=mock_uploader,
         ), mock.patch.object(
             sys.stdout, "write", mock_stdout_write
         ), mock.patch.object(
@@ -124,7 +126,8 @@ def testUploadIntentOneShot(self):
         # Expect that ".*Done scanning logdir.*" is among the things printed.
         stdout_writes = [x[0][0] for x in mock_stdout_write.call_args_list]
         self.assertRegex(
-            ",".join(stdout_writes), ".*experiment created.*",
+            ",".join(stdout_writes),
+            ".*experiment created.*",
         )
         # Expect that the last thing written is the string "Done" and the
         # experiment_id.
@@ -138,14 +141,18 @@ def testUploadIntentWithExperimentUrlCallback(self):
         server_info.url_format.id_placeholder = "{}"
 
         stub = dry_run_stubs.DryRunTensorBoardWriterStub()
-        stub.CreateExperiment = lambda req, **__: write_service_pb2.CreateExperimentResponse(
-            experiment_id="test_experiment_id", url="this URL is ignored"
+        stub.CreateExperiment = (
+            lambda req, **__: write_service_pb2.CreateExperimentResponse(
+                experiment_id="test_experiment_id", url="this URL is ignored"
+            )
         )
 
         expected_url = "https://tensorboard.dev/x/test_experiment_id"
 
         with mock.patch.object(
-            dry_run_stubs, "DryRunTensorBoardWriterStub", wraps=lambda: stub,
+            dry_run_stubs,
+            "DryRunTensorBoardWriterStub",
+            wraps=lambda: stub,
         ), mock.patch.object(sys.stdout, "write"):
             mock_channel = mock.Mock()
             mock_experiment_url_callback = mock.Mock()
@@ -164,7 +171,9 @@ def testUploadIntentDryRunNonOneShotInterrupted(self):
         mock_stdout_write = mock.MagicMock()
         mock_uploader = mock.MagicMock()
         with mock.patch.object(
-            mock_uploader, "start_uploading", side_effect=KeyboardInterrupt(),
+            mock_uploader,
+            "start_uploading",
+            side_effect=KeyboardInterrupt(),
         ), mock.patch.object(
             uploader_lib, "TensorBoardUploader", return_value=mock_uploader
         ), mock.patch.object(
@@ -184,7 +193,9 @@ def testUploadIntentNonDryRunNonOneShotInterrupted(self):
         mock_stdout_write = mock.MagicMock()
         mock_uploader = mock.MagicMock()
         with mock.patch.object(
-            mock_uploader, "start_uploading", side_effect=KeyboardInterrupt(),
+            mock_uploader,
+            "start_uploading",
+            side_effect=KeyboardInterrupt(),
         ), mock.patch.object(
             uploader_lib, "TensorBoardUploader", return_value=mock_uploader
         ), mock.patch.object(
diff --git a/tensorboard/uploader/uploader_test.py b/tensorboard/uploader/uploader_test.py
index f79e787290..b55eb49f89 100644
--- a/tensorboard/uploader/uploader_test.py
+++ b/tensorboard/uploader/uploader_test.py
@@ -173,7 +173,9 @@ def _create_uploader(
 
 def _create_request_sender(
-    experiment_id=None, api=None, allowed_plugins=_USE_DEFAULT,
+    experiment_id=None,
+    api=None,
+    allowed_plugins=_USE_DEFAULT,
 ):
     if api is _USE_DEFAULT:
         api = _create_mock_client()
@@ -300,7 +302,8 @@ def test_create_experiment_with_all_metadata(self):
 
         (args, _) = mock_client.CreateExperiment.call_args
         expected_request = write_service_pb2.CreateExperimentRequest(
-            name=new_name, description=new_description,
+            name=new_name,
+            description=new_description,
         )
         self.assertEqual(args[0], expected_request)
@@ -543,7 +546,8 @@ def test_start_uploading_graphs(self):
             actual_graph_def = graph_pb2.GraphDef.FromString(data)
             self.assertProtoEquals(expected_graph_def, actual_graph_def)
             self.assertEqual(
-                set(r.blob_sequence_id for r in requests), {"blob%d" % i},
+                set(r.blob_sequence_id for r in requests),
+                {"blob%d" % i},
             )
         self.assertEqual(0, mock_rate_limiter.tick.call_count)
         self.assertEqual(0, mock_tensor_rate_limiter.tick.call_count)
@@ -611,7 +615,9 @@ def test_filter_graphs(self):
         limiter.tick.side_effect = [None, AbortUploadError]
         mock_client = _create_mock_client()
         uploader = _create_uploader(
-            mock_client, logdir, logdir_poll_rate_limiter=limiter,
+            mock_client,
+            logdir,
+            logdir_poll_rate_limiter=limiter,
         )
         uploader.create_experiment()
@@ -1073,7 +1079,8 @@ def _add_events(self, sender, run_name, events):
     def _add_events_and_flush(self, events):
         mock_client = _create_mock_client()
         sender = _create_scalar_request_sender(
-            experiment_id="123", api=mock_client,
+            experiment_id="123",
+            api=mock_client,
         )
         self._add_events(sender, "", events)
         sender.flush()
@@ -1355,7 +1362,9 @@ def mock_add_point(byte_budget_manager_self, point):
             raise uploader_lib._OutOfSpaceError()
 
         with mock.patch.object(
-            uploader_lib._ByteBudgetManager, "add_point", mock_add_point,
+            uploader_lib._ByteBudgetManager,
+            "add_point",
+            mock_add_point,
         ):
             sender = _create_scalar_request_sender("123", mock_client)
             self._add_events(sender, "train", _apply_compat([event_1]))
@@ -1799,7 +1808,9 @@ def mock_add_point(byte_budget_manager_self, point):
             raise uploader_lib._OutOfSpaceError()
 
         with mock.patch.object(
-            uploader_lib._ByteBudgetManager, "add_point", mock_add_point,
+            uploader_lib._ByteBudgetManager,
+            "add_point",
+            mock_add_point,
         ):
             sender = _create_tensor_request_sender("123", mock_client)
             self._add_events(sender, "train", _apply_compat([event_1]))
diff --git a/tensorboard/uploader/util_test.py b/tensorboard/uploader/util_test.py
index caba38fda8..d3b619bf8b 100644
--- a/tensorboard/uploader/util_test.py
+++ b/tensorboard/uploader/util_test.py
@@ -88,7 +88,11 @@ def test_windows(self):
                 util.get_user_config_directory(),
             )
         with mock.patch.dict(
-            os.environ, {"LOCALAPPDATA": "", "APPDATA": "",}
+            os.environ,
+            {
+                "LOCALAPPDATA": "",
+                "APPDATA": "",
+            },
         ):
             self.assertIsNone(util.get_user_config_directory())
diff --git a/tensorboard/util/timing.py b/tensorboard/util/timing.py
index 0823efb585..8353eaa3cb 100644
--- a/tensorboard/util/timing.py
+++ b/tensorboard/util/timing.py
@@ -105,7 +105,11 @@ def _log_latency(name, log_level):
         _store.nesting_level = start_level
         elapsed = time.time() - started
         _log(
-            log_level, "%s LEAVE %s - %0.6fs elapsed", prefix, name, elapsed,
+            log_level,
+            "%s LEAVE %s - %0.6fs elapsed",
+            prefix,
+            name,
+            elapsed,
        )
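
Note on the _ByteBudgetManager hunks above: the docstring describes a flush-and-retry protocol in which add_run(), add_tag(), and add_point() raise a non-fatal _OutOfSpaceError once the next proto would overrun the request byte budget, and the caller responds by flushing the in-flight request and starting a fresh one. The sketch below illustrates only that calling convention; send_points() and _FakeBudgetManager are hypothetical stand-ins (the fake budget counts points rather than proto bytes), not TensorBoard APIs.

    class _OutOfSpaceError(Exception):
        """Non-fatal: the current request cannot absorb the next datum."""


    class _FakeBudgetManager(object):
        """Stand-in with the reset()/add_point() interface from the diff,
        simplified to count points instead of serialized bytes."""

        def __init__(self, max_points):
            self._max_points = max_points
            self._used = 0

        def reset(self, request):
            # Charge the cost of the (here: empty) base request.
            self._used = len(request)

        def add_point(self, point):
            if self._used + 1 > self._max_points:
                raise _OutOfSpaceError()
            self._used += 1


    def send_points(budget_manager, flush, points):
        """Drive the flush-and-retry loop described by the docstring."""
        request = []  # Stand-in for an empty request proto.
        budget_manager.reset(request)
        for point in points:
            try:
                budget_manager.add_point(point)
            except _OutOfSpaceError:
                # Non-fatal: send what has accumulated, then charge the
                # point against a fresh request and budget.
                flush(request)
                request = []
                budget_manager.reset(request)
                budget_manager.add_point(point)
            request.append(point)
        flush(request)  # Send the final partial request.


    if __name__ == "__main__":
        batches = []
        send_points(_FakeBudgetManager(max_points=2), batches.append, range(5))
        print(batches)  # [[0, 1], [2, 3], [4]]

A point too large to fit even an otherwise empty request re-raises out of the retry branch; the real senders presumably surface that case as a hard error instead of retrying.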