From 3d53fd36865e69434d4b44cead095f4b5060c86f Mon Sep 17 00:00:00 2001 From: William Chargin Date: Wed, 23 Dec 2020 14:12:11 -0800 Subject: [PATCH 1/9] py3: replace `assertRaisesRegex`, `assertCountEqual` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: `six.assertRaisesRegex(self, ...) -> self.assertRaisesRegex(...)`; `six.assertCountEqual(self, ...) -> self.assertCountEqual(...)`. Generated manually (with Vim macros), since the `self` transposition means that a blanket `sed` doesn’t suffice. Tried using Comby, Codemod, and Semgrep, but none of them could handle this enormously complicated pattern (Comby has bad Python-whitespace support, Codemod is plain regex only, and Semgrep just spit out bad output). Incantation (requires `vim-fugitive` for `Ggrep`): ``` vim +'let @q = "cwself\/self\rdf," | let @w = "@q:noau w|cn\@w" | Ggrep "six\.\(assertRaisesRegex\|assertCountEqual\)" | normal @w' +q && flake8 tensorboard/ --select=F401 | cut -d : -f 1 | xargs git sed '/import six/d' && black tensorboard ``` Test Plan: Suffices that all tests pass. 
wchargin-branch: py3-assertraisesregex --- tensorboard/backend/application_test.py | 37 ++++++++--------- .../event_processing/data_provider_test.py | 3 +- .../event_accumulator_test.py | 2 +- .../event_processing/io_wrapper_test.py | 5 +-- .../plugin_event_accumulator_test.py | 2 +- tensorboard/backend/http_util_test.py | 2 +- .../tensorflow_stub/io/gfile_s3_test.py | 12 ++---- .../compat/tensorflow_stub/io/gfile_test.py | 12 ++---- tensorboard/data/provider_test.py | 17 ++++---- tensorboard/lazy_test.py | 7 ++-- tensorboard/manager_test.py | 30 ++++++-------- tensorboard/plugins/audio/summary_test.py | 9 ++-- tensorboard/plugins/core/core_plugin_test.py | 10 ++--- tensorboard/plugins/hparams/keras_test.py | 24 +++++------ .../plugins/hparams/summary_v2_test.py | 41 +++++++++---------- tensorboard/plugins/image/summary_test.py | 7 ++-- tensorboard/plugins/mesh/metadata_test.py | 9 ++-- .../plugins/pr_curve/pr_curves_plugin_test.py | 9 +--- .../plugins/projector/projector_api_test.py | 5 +-- tensorboard/plugins/scalar/summary_test.py | 7 ++-- tensorboard/plugins/text/summary_test.py | 6 +-- tensorboard/program_test.py | 6 +-- tensorboard/util/encoder_test.py | 5 +-- 23 files changed, 114 insertions(+), 153 deletions(-) diff --git a/tensorboard/backend/application_test.py b/tensorboard/backend/application_test.py index 2e04ab2c2c..c4057f3783 100644 --- a/tensorboard/backend/application_test.py +++ b/tensorboard/backend/application_test.py @@ -18,7 +18,6 @@ import json from unittest import mock -import six from werkzeug import test as werkzeug_test from werkzeug import wrappers @@ -446,8 +445,8 @@ def testPluginEntryBadModulePath(self): ] app = application.TensorBoardWSGI(plugins) server = werkzeug_test.Client(app, wrappers.BaseResponse) - with six.assertRaisesRegex( - self, ValueError, "Expected es_module_path to be non-absolute path" + with self.assertRaisesRegex( + ValueError, "Expected es_module_path to be non-absolute path" ): 
server.get("/data/plugin_entry.html?name=mallory") @@ -461,8 +460,8 @@ def testNgComponentPluginWithIncompatibleSetElementName(self): ] app = application.TensorBoardWSGI(plugins) server = werkzeug_test.Client(app, wrappers.BaseResponse) - with six.assertRaisesRegex( - self, ValueError, "quux.*declared.*both Angular.*legacy" + with self.assertRaisesRegex( + ValueError, "quux.*declared.*both Angular.*legacy" ): server.get("/data/plugins_listing") @@ -476,8 +475,8 @@ def testNgComponentPluginWithIncompatiblEsModulePath(self): ] app = application.TensorBoardWSGI(plugins) server = werkzeug_test.Client(app, wrappers.BaseResponse) - with six.assertRaisesRegex( - self, ValueError, "quux.*declared.*both Angular.*iframed" + with self.assertRaisesRegex( + ValueError, "quux.*declared.*both Angular.*iframed" ): server.get("/data/plugins_listing") @@ -576,33 +575,33 @@ def testComprehensiveName(self): ) def testNameIsNone(self): - with six.assertRaisesRegex(self, ValueError, r"no plugin_name"): + with self.assertRaisesRegex(ValueError, r"no plugin_name"): application.TensorBoardWSGI(plugins=[FakePlugin(plugin_name=None)]) def testEmptyName(self): - with six.assertRaisesRegex(self, ValueError, r"invalid name"): + with self.assertRaisesRegex(ValueError, r"invalid name"): application.TensorBoardWSGI(plugins=[FakePlugin(plugin_name="")]) def testNameWithSlashes(self): - with six.assertRaisesRegex(self, ValueError, r"invalid name"): + with self.assertRaisesRegex(ValueError, r"invalid name"): application.TensorBoardWSGI( plugins=[FakePlugin(plugin_name="scalars/data")] ) def testNameWithPeriods(self): - with six.assertRaisesRegex(self, ValueError, r"invalid name"): + with self.assertRaisesRegex(ValueError, r"invalid name"): application.TensorBoardWSGI( plugins=[FakePlugin(plugin_name="scalars.data")] ) def testNameWithSpaces(self): - with six.assertRaisesRegex(self, ValueError, r"invalid name"): + with self.assertRaisesRegex(ValueError, r"invalid name"): application.TensorBoardWSGI( 
plugins=[FakePlugin(plugin_name="my favorite plugin")] ) def testDuplicateName(self): - with six.assertRaisesRegex(self, ValueError, r"Duplicate"): + with self.assertRaisesRegex(ValueError, r"Duplicate"): application.TensorBoardWSGI( plugins=[ FakePlugin(plugin_name="scalars"), @@ -625,23 +624,23 @@ def testWildcardRoute(self): application.TensorBoardWSGI([self._make_plugin("/foo/*")]) def testNonPathComponentWildcardRoute(self): - with six.assertRaisesRegex(self, ValueError, r"invalid route"): + with self.assertRaisesRegex(ValueError, r"invalid route"): application.TensorBoardWSGI([self._make_plugin("/foo*")]) def testMultiWildcardRoute(self): - with six.assertRaisesRegex(self, ValueError, r"invalid route"): + with self.assertRaisesRegex(ValueError, r"invalid route"): application.TensorBoardWSGI([self._make_plugin("/foo/*/bar/*")]) def testInternalWildcardRoute(self): - with six.assertRaisesRegex(self, ValueError, r"invalid route"): + with self.assertRaisesRegex(ValueError, r"invalid route"): application.TensorBoardWSGI([self._make_plugin("/foo/*/bar")]) def testEmptyRoute(self): - with six.assertRaisesRegex(self, ValueError, r"invalid route"): + with self.assertRaisesRegex(ValueError, r"invalid route"): application.TensorBoardWSGI([self._make_plugin("")]) def testSlashlessRoute(self): - with six.assertRaisesRegex(self, ValueError, r"invalid route"): + with self.assertRaisesRegex(ValueError, r"invalid route"): application.TensorBoardWSGI([self._make_plugin("runaway")]) @@ -660,7 +659,7 @@ def testMakePluginLoader_pluginLoader(self): self.assertIs(loader, application.make_plugin_loader(loader)) def testMakePluginLoader_invalidType(self): - with six.assertRaisesRegex(self, TypeError, "FakePlugin"): + with self.assertRaisesRegex(TypeError, "FakePlugin"): application.make_plugin_loader(FakePlugin()) diff --git a/tensorboard/backend/event_processing/data_provider_test.py b/tensorboard/backend/event_processing/data_provider_test.py index c339cbe217..f2c304e9ea 100644 
--- a/tensorboard/backend/event_processing/data_provider_test.py +++ b/tensorboard/backend/event_processing/data_provider_test.py @@ -318,8 +318,7 @@ def test_read_scalars_but_not_rank_0(self): provider = self.create_provider() run_tag_filter = base_provider.RunTagFilter(["waves"], ["bad"]) # No explicit checks yet. - with six.assertRaisesRegex( - self, + with self.assertRaisesRegex( ValueError, "can only convert an array of size 1 to a Python scalar", ): diff --git a/tensorboard/backend/event_processing/event_accumulator_test.py b/tensorboard/backend/event_processing/event_accumulator_test.py index 1f5266b5c2..31b922b70a 100644 --- a/tensorboard/backend/event_processing/event_accumulator_test.py +++ b/tensorboard/backend/event_processing/event_accumulator_test.py @@ -1061,7 +1061,7 @@ def testPluginTagToContent_PluginsCannotJumpOnTheBandwagon(self): self.assertEqual( acc.PluginTagToContent("outlet"), {"you_are_it": b"120v"} ) - with six.assertRaisesRegex(self, KeyError, "plug"): + with self.assertRaisesRegex(KeyError, "plug"): acc.PluginTagToContent("plug") diff --git a/tensorboard/backend/event_processing/io_wrapper_test.py b/tensorboard/backend/event_processing/io_wrapper_test.py index bf9ab9d4f5..a14d8fd410 100644 --- a/tensorboard/backend/event_processing/io_wrapper_test.py +++ b/tensorboard/backend/event_processing/io_wrapper_test.py @@ -17,7 +17,6 @@ import os import tempfile -import six import tensorflow as tf from tensorboard.backend.event_processing import io_wrapper @@ -78,8 +77,8 @@ def testIsSummaryEventsFileFalse(self): self.assertFalse(io_wrapper.IsSummaryEventsFile("/logdir/model.ckpt")) def testIsIsTensorFlowEventsFileWithEmptyInput(self): - with six.assertRaisesRegex( - self, ValueError, r"Path must be a nonempty string" + with self.assertRaisesRegex( + ValueError, r"Path must be a nonempty string" ): io_wrapper.IsTensorFlowEventsFile("") diff --git a/tensorboard/backend/event_processing/plugin_event_accumulator_test.py 
b/tensorboard/backend/event_processing/plugin_event_accumulator_test.py index c022b36c7f..913ef62ed6 100644 --- a/tensorboard/backend/event_processing/plugin_event_accumulator_test.py +++ b/tensorboard/backend/event_processing/plugin_event_accumulator_test.py @@ -884,7 +884,7 @@ def testPluginTagToContent_PluginsCannotJumpOnTheBandwagon(self): self.assertEqual( acc.PluginTagToContent("outlet"), {"you_are_it": b"120v"} ) - with six.assertRaisesRegex(self, KeyError, "plug"): + with self.assertRaisesRegex(KeyError, "plug"): acc.PluginTagToContent("plug") self.assertItemsEqual(acc.ActivePlugins(), ["outlet"]) diff --git a/tensorboard/backend/http_util_test.py b/tensorboard/backend/http_util_test.py index a8977c4557..fcb63ef849 100644 --- a/tensorboard/backend/http_util_test.py +++ b/tensorboard/backend/http_util_test.py @@ -196,7 +196,7 @@ def testPrecompressedResponse_streamingDecompression_catchesBadSize(self): # Streaming gunzip defers actual unzipping until response is used; once # we iterate over the whole file-wrapper application iterator, the # underlying GzipFile should be closed, and throw the size check error. - with six.assertRaisesRegex(self, IOError, "Incorrect length"): + with self.assertRaisesRegex(IOError, "Incorrect length"): _ = list(r.response) def testJson_getsAutoSerialized(self): diff --git a/tensorboard/compat/tensorflow_stub/io/gfile_s3_test.py b/tensorboard/compat/tensorflow_stub/io/gfile_s3_test.py index 40a5c2d306..af45537bff 100644 --- a/tensorboard/compat/tensorflow_stub/io/gfile_s3_test.py +++ b/tensorboard/compat/tensorflow_stub/io/gfile_s3_test.py @@ -16,7 +16,6 @@ import boto3 import os -import six import unittest from moto import mock_s3 @@ -59,8 +58,7 @@ def testGlob(self): ] expected_listing = [self._PathJoin(temp_dir, f) for f in expected] gotten_listing = gfile.glob(self._PathJoin(temp_dir, "*")) - six.assertCountEqual( - self, + self.assertCountEqual( expected_listing, gotten_listing, "Files must match. Expected %r. Got %r." 
@@ -86,7 +84,7 @@ def testListdir(self): "waldo", ] gotten_files = gfile.listdir(temp_dir) - six.assertCountEqual(self, expected_files, gotten_files) + self.assertCountEqual(expected_files, gotten_files) @mock_s3 def testMakeDirs(self): @@ -403,16 +401,14 @@ def _CompareFilesPerSubdirectory(self, expected, gotten): result[0]: list(result[2]) for result in gotten } - six.assertCountEqual( - self, + self.assertCountEqual( expected_directory_to_files.keys(), gotten_directory_to_files.keys(), ) for subdir, expected_listing in expected_directory_to_files.items(): gotten_listing = gotten_directory_to_files[subdir] - six.assertCountEqual( - self, + self.assertCountEqual( expected_listing, gotten_listing, "Files for subdir %r must match. Expected %r. Got %r." diff --git a/tensorboard/compat/tensorflow_stub/io/gfile_test.py b/tensorboard/compat/tensorflow_stub/io/gfile_test.py index 6170bdbe46..766f1f5d18 100644 --- a/tensorboard/compat/tensorflow_stub/io/gfile_test.py +++ b/tensorboard/compat/tensorflow_stub/io/gfile_test.py @@ -16,7 +16,6 @@ import io import os -import six from tensorboard import test as tb_test from tensorboard.compat.tensorflow_stub import errors @@ -44,8 +43,7 @@ def testGlob(self): ] expected_listing = [os.path.join(temp_dir, f) for f in expected] gotten_listing = gfile.glob(os.path.join(temp_dir, "*")) - six.assertCountEqual( - self, + self.assertCountEqual( expected_listing, gotten_listing, "Files must match. Expected %r. Got %r." 
@@ -67,7 +65,7 @@ def testListdir(self): "model.ckpt", "waldo", ) - six.assertCountEqual(self, expected_files, gfile.listdir(temp_dir)) + self.assertCountEqual(expected_files, gfile.listdir(temp_dir)) def testMakeDirs(self): temp_dir = self.get_temp_dir() @@ -375,16 +373,14 @@ def _CompareFilesPerSubdirectory(self, expected, gotten): result[0]: list(result[2]) for result in gotten } - six.assertCountEqual( - self, + self.assertCountEqual( expected_directory_to_files.keys(), gotten_directory_to_files.keys(), ) for subdir, expected_listing in expected_directory_to_files.items(): gotten_listing = gotten_directory_to_files[subdir] - six.assertCountEqual( - self, + self.assertCountEqual( expected_listing, gotten_listing, "Files for subdir %r must match. Expected %r. Got %r." diff --git a/tensorboard/data/provider_test.py b/tensorboard/data/provider_test.py index f7e93f2138..a58b35373f 100644 --- a/tensorboard/data/provider_test.py +++ b/tensorboard/data/provider_test.py @@ -16,7 +16,6 @@ import numpy as np -import six from tensorboard import test as tb_test from tensorboard.data import provider @@ -24,7 +23,7 @@ class DataProviderTest(tb_test.TestCase): def test_abstract(self): - with six.assertRaisesRegex(self, TypeError, "abstract class"): + with self.assertRaisesRegex(TypeError, "abstract class"): provider.DataProvider() @@ -217,7 +216,7 @@ def test_hash(self): x = provider.TensorDatum( step=12, wall_time=0.25, numpy=np.array([1.25]) ) - with six.assertRaisesRegex(self, TypeError, "unhashable type"): + with self.assertRaisesRegex(TypeError, "unhashable type"): hash(x) @@ -368,18 +367,18 @@ def test_defensive_copy(self): def test_validates_runs_tags(self): # Accidentally passed scalar strings - with six.assertRaisesRegex(self, TypeError, "runs:.*got.*str.*myrun"): + with self.assertRaisesRegex(TypeError, "runs:.*got.*str.*myrun"): provider.RunTagFilter(runs="myrun") - with six.assertRaisesRegex(self, TypeError, "tags:.*got.*str.*mytag"): + with 
self.assertRaisesRegex(TypeError, "tags:.*got.*str.*mytag"): provider.RunTagFilter(tags="mytag") # Passed collections with non-string elements - with six.assertRaisesRegex( - self, TypeError, "runs:.*got item of type.*NoneType.*None" + with self.assertRaisesRegex( + TypeError, "runs:.*got item of type.*NoneType.*None" ): provider.RunTagFilter(runs=[None]) - with six.assertRaisesRegex( - self, TypeError, "tags:.*got item of type.*int.*3" + with self.assertRaisesRegex( + TypeError, "tags:.*got item of type.*int.*3" ): provider.RunTagFilter(tags=["one", "two", 3]) diff --git a/tensorboard/lazy_test.py b/tensorboard/lazy_test.py index b965174ee8..9dd57bb06f 100644 --- a/tensorboard/lazy_test.py +++ b/tensorboard/lazy_test.py @@ -15,7 +15,6 @@ """Unit tests for the `tensorboard.lazy` module.""" -import six import unittest from tensorboard import lazy @@ -57,7 +56,7 @@ def outer(): return inner expected_message = "Circular import when resolving LazyModule 'inner'" - with six.assertRaisesRegex(self, ImportError, expected_message): + with self.assertRaisesRegex(ImportError, expected_message): outer.bar def test_repr_before_load(self): @@ -88,9 +87,9 @@ def test_failed_load_idempotent(self): def bad(): raise ValueError(expected_message) - with six.assertRaisesRegex(self, ValueError, expected_message): + with self.assertRaisesRegex(ValueError, expected_message): bad.day - with six.assertRaisesRegex(self, ValueError, expected_message): + with self.assertRaisesRegex(ValueError, expected_message): bad.day def test_loads_only_once_even_when_result_equal_to_everything(self): diff --git a/tensorboard/manager_test.py b/tensorboard/manager_test.py index 4f4406128f..48ba560618 100644 --- a/tensorboard/manager_test.py +++ b/tensorboard/manager_test.py @@ -23,7 +23,6 @@ import tempfile from unittest import mock -import six from tensorboard import manager from tensorboard import test as tb_test @@ -65,8 +64,7 @@ def test_roundtrip_serialization(self): def 
test_serialization_rejects_bad_types(self): bad_time = datetime.datetime.fromtimestamp(1549061116) # not an int info = _make_info()._replace(start_time=bad_time) - with six.assertRaisesRegex( - self, + with self.assertRaisesRegex( ValueError, r"expected 'start_time' of type.*int.*, but found: datetime\.", ): @@ -74,8 +72,7 @@ def test_serialization_rejects_bad_types(self): def test_serialization_rejects_wrong_version(self): info = _make_info()._replace(version="reversion") - with six.assertRaisesRegex( - self, + with self.assertRaisesRegex( ValueError, "expected 'version' to be '.*', but found: 'reversion'", ): @@ -83,13 +80,13 @@ def test_serialization_rejects_wrong_version(self): def test_deserialization_rejects_bad_json(self): bad_input = "parse me if you dare" - with six.assertRaisesRegex(self, ValueError, "invalid JSON:"): + with self.assertRaisesRegex(ValueError, "invalid JSON:"): manager._info_from_string(bad_input) def test_deserialization_rejects_non_object_json(self): bad_input = "[1, 2]" - with six.assertRaisesRegex( - self, ValueError, re.escape("not a JSON object: [1, 2]") + with self.assertRaisesRegex( + ValueError, re.escape("not a JSON object: [1, 2]") ): manager._info_from_string(bad_input) @@ -98,8 +95,8 @@ def test_deserialization_rejects_missing_version(self): json_value = json.loads(manager._info_to_string(info)) del json_value["version"] bad_input = json.dumps(json_value) - with six.assertRaisesRegex( - self, ValueError, re.escape("missing keys: ['version']") + with self.assertRaisesRegex( + ValueError, re.escape("missing keys: ['version']") ): manager._info_from_string(bad_input) @@ -124,8 +121,8 @@ def test_deserialization_rejects_missing_keys(self): json_value = json.loads(manager._info_to_string(info)) del json_value["start_time"] bad_input = json.dumps(json_value) - with six.assertRaisesRegex( - self, ValueError, re.escape("missing keys: ['start_time']") + with self.assertRaisesRegex( + ValueError, re.escape("missing keys: 
['start_time']") ): manager._info_from_string(bad_input) @@ -134,8 +131,7 @@ def test_deserialization_rejects_bad_types(self): json_value = json.loads(manager._info_to_string(info)) json_value["start_time"] = "2001-02-03T04:05:06" bad_input = json.dumps(json_value) - with six.assertRaisesRegex( - self, + with self.assertRaisesRegex( ValueError, "expected 'start_time' of type.*int.*, but found:.*" "'2001-02-03T04:05:06'", @@ -332,8 +328,7 @@ def test_write_info_file_rejects_bad_types(self): # `TensorBoardInfoTest` above. bad_time = datetime.datetime.fromtimestamp(1549061116) info = _make_info()._replace(start_time=bad_time) - with six.assertRaisesRegex( - self, + with self.assertRaisesRegex( ValueError, r"expected 'start_time' of type.*int.*, but found: datetime\.", ): @@ -344,8 +339,7 @@ def test_write_info_file_rejects_wrong_version(self): # The particulars of validation are tested more thoroughly in # `TensorBoardInfoTest` above. info = _make_info()._replace(version="reversion") - with six.assertRaisesRegex( - self, + with self.assertRaisesRegex( ValueError, "expected 'version' to be '.*', but found: 'reversion'", ): diff --git a/tensorboard/plugins/audio/summary_test.py b/tensorboard/plugins/audio/summary_test.py index a4d080cf60..3bfb41846a 100644 --- a/tensorboard/plugins/audio/summary_test.py +++ b/tensorboard/plugins/audio/summary_test.py @@ -20,7 +20,6 @@ import os import numpy as np -import six import tensorflow as tf from tensorboard.compat import tf2 @@ -131,18 +130,18 @@ def test_audio_count_when_more_than_max(self): def test_requires_nonnegative_max_outputs(self): data = np.array(1, np.float32, ndmin=3) - with six.assertRaisesRegex( - self, (ValueError, tf.errors.InvalidArgumentError), ">= 0" + with self.assertRaisesRegex( + (ValueError, tf.errors.InvalidArgumentError), ">= 0" ): self.audio("k488", data, 44100, max_outputs=-1) def test_requires_rank_3(self): - with six.assertRaisesRegex(self, ValueError, "must have rank 3"): + with 
self.assertRaisesRegex(ValueError, "must have rank 3"): self.audio("k488", np.array([[1]]), 44100) def test_requires_wav(self): data = np.array(1, np.float32, ndmin=3) - with six.assertRaisesRegex(self, ValueError, "Unknown encoding"): + with self.assertRaisesRegex(ValueError, "Unknown encoding"): self.audio("k488", data, 44100, encoding="pptx") diff --git a/tensorboard/plugins/core/core_plugin_test.py b/tensorboard/plugins/core/core_plugin_test.py index f6e2de5bfe..39ea19d1e2 100644 --- a/tensorboard/plugins/core/core_plugin_test.py +++ b/tensorboard/plugins/core/core_plugin_test.py @@ -87,19 +87,17 @@ def testFlag(self): ) event_or_logdir_req = r"Must specify either --logdir or --event_file.$" - with six.assertRaisesRegex(self, ValueError, event_or_logdir_req): + with self.assertRaisesRegex(ValueError, event_or_logdir_req): loader.fix_flags(FakeFlags(inspect=True)) - with six.assertRaisesRegex( - self, ValueError, one_of_event_or_logdir_req - ): + with self.assertRaisesRegex(ValueError, one_of_event_or_logdir_req): loader.fix_flags( FakeFlags( inspect=True, logdir="/tmp", event_file="/tmp/event.out" ) ) - with six.assertRaisesRegex(self, ValueError, logdir_or_db_req): + with self.assertRaisesRegex(ValueError, logdir_or_db_req): loader.fix_flags(FakeFlags(inspect=False)) - with six.assertRaisesRegex(self, ValueError, logdir_or_db_req): + with self.assertRaisesRegex(ValueError, logdir_or_db_req): loader.fix_flags( FakeFlags(inspect=False, event_file="/tmp/event.out") ) diff --git a/tensorboard/plugins/hparams/keras_test.py b/tensorboard/plugins/hparams/keras_test.py index 1c5e736b7d..b06bc40ef9 100644 --- a/tensorboard/plugins/hparams/keras_test.py +++ b/tensorboard/plugins/hparams/keras_test.py @@ -18,7 +18,6 @@ from unittest import mock from google.protobuf import text_format -import six import tensorflow as tf from tensorboard.plugins.hparams import keras @@ -150,22 +149,21 @@ def test_non_eager_failure(self): with tf.compat.v1.Graph().as_default(): assert not 
tf.executing_eagerly() self._initialize_model(writer=self.logdir) - with six.assertRaisesRegex( - self, RuntimeError, "only supported in TensorFlow eager mode" + with self.assertRaisesRegex( + RuntimeError, "only supported in TensorFlow eager mode" ): self.model.fit(x=[(1,)], y=[(2,)], callbacks=[self.callback]) def test_reuse_failure(self): self._initialize_model(writer=self.logdir) self.model.fit(x=[(1,)], y=[(2,)], callbacks=[self.callback]) - with six.assertRaisesRegex( - self, RuntimeError, "cannot be reused across training sessions" + with self.assertRaisesRegex( + RuntimeError, "cannot be reused across training sessions" ): self.model.fit(x=[(1,)], y=[(2,)], callbacks=[self.callback]) def test_invalid_writer(self): - with six.assertRaisesRegex( - self, + with self.assertRaisesRegex( TypeError, "writer must be a `SummaryWriter` or `str`, not None", ): @@ -176,8 +174,8 @@ def test_duplicate_hparam_names_across_object_and_string(self): "foo": 1, hp.HParam("foo"): 1, } - with six.assertRaisesRegex( - self, ValueError, "multiple values specified for hparam 'foo'" + with self.assertRaisesRegex( + ValueError, "multiple values specified for hparam 'foo'" ): keras.Callback(self.get_temp_dir(), hparams) @@ -186,14 +184,14 @@ def test_duplicate_hparam_names_from_two_objects(self): hp.HParam("foo"): 1, hp.HParam("foo"): 1, } - with six.assertRaisesRegex( - self, ValueError, "multiple values specified for hparam 'foo'" + with self.assertRaisesRegex( + ValueError, "multiple values specified for hparam 'foo'" ): keras.Callback(self.get_temp_dir(), hparams) def test_invalid_trial_id(self): - with six.assertRaisesRegex( - self, TypeError, "`trial_id` should be a `str`, but got: 12" + with self.assertRaisesRegex( + TypeError, "`trial_id` should be a `str`, but got: 12" ): keras.Callback(self.get_temp_dir(), {}, trial_id=12) diff --git a/tensorboard/plugins/hparams/summary_v2_test.py b/tensorboard/plugins/hparams/summary_v2_test.py index b5ff7eb336..72460b1b7d 100644 --- 
a/tensorboard/plugins/hparams/summary_v2_test.py +++ b/tensorboard/plugins/hparams/summary_v2_test.py @@ -21,7 +21,6 @@ from google.protobuf import text_format import numpy as np -import six from tensorboard import test from tensorboard.compat import tf @@ -170,8 +169,8 @@ def test_pb_explicit_trial_id(self): self._check_summary(result, check_group_name=True) def test_pb_invalid_trial_id(self): - with six.assertRaisesRegex( - self, TypeError, "`trial_id` should be a `str`, but got: 12" + with self.assertRaisesRegex( + TypeError, "`trial_id` should be a `str`, but got: 12" ): hp.hparams_pb(self.hparams, trial_id=12) @@ -216,8 +215,8 @@ def test_duplicate_hparam_names_across_object_and_string(self): "foo": 1, hp.HParam("foo"): 1, } - with six.assertRaisesRegex( - self, ValueError, "multiple values specified for hparam 'foo'" + with self.assertRaisesRegex( + ValueError, "multiple values specified for hparam 'foo'" ): hp.hparams_pb(hparams) @@ -226,8 +225,8 @@ def test_duplicate_hparam_names_from_two_objects(self): hp.HParam("foo"): 1, hp.HParam("foo"): 1, } - with six.assertRaisesRegex( - self, ValueError, "multiple values specified for hparam 'foo'" + with self.assertRaisesRegex( + ValueError, "multiple values specified for hparam 'foo'" ): hp.hparams_pb(hparams) @@ -508,17 +507,17 @@ def test_singleton_domain(self): self.assertEqual(domain.dtype, int) def test_non_ints(self): - with six.assertRaisesRegex( - self, TypeError, "min_value must be an int: -inf" + with self.assertRaisesRegex( + TypeError, "min_value must be an int: -inf" ): hp.IntInterval(float("-inf"), 0) - with six.assertRaisesRegex( - self, TypeError, "max_value must be an int: 'eleven'" + with self.assertRaisesRegex( + TypeError, "max_value must be an int: 'eleven'" ): hp.IntInterval(7, "eleven") def test_backward_endpoints(self): - with six.assertRaisesRegex(self, ValueError, "123 > 45"): + with self.assertRaisesRegex(ValueError, "123 > 45"): hp.IntInterval(123, 45) def test_sample_uniform(self): @@ 
-565,17 +564,17 @@ def test_infinite_domain(self): self.assertEqual(domain.dtype, float) def test_non_ints(self): - with six.assertRaisesRegex( - self, TypeError, "min_value must be a float: True" + with self.assertRaisesRegex( + TypeError, "min_value must be a float: True" ): hp.RealInterval(True, 2.0) - with six.assertRaisesRegex( - self, TypeError, "max_value must be a float: 'wat'" + with self.assertRaisesRegex( + TypeError, "max_value must be a float: 'wat'" ): hp.RealInterval(1.2, "wat") def test_backward_endpoints(self): - with six.assertRaisesRegex(self, ValueError, "2.1 > 1.2"): + with self.assertRaisesRegex(ValueError, "2.1 > 1.2"): hp.RealInterval(2.1, 1.2) def test_sample_uniform(self): @@ -614,14 +613,14 @@ def test_empty_with_explicit_dtype(self): self.assertEqual(domain.values, []) def test_empty_with_unspecified_dtype(self): - with six.assertRaisesRegex( - self, ValueError, "Empty domain with no dtype specified" + with self.assertRaisesRegex( + ValueError, "Empty domain with no dtype specified" ): hp.Discrete([]) def test_dtype_mismatch(self): - with six.assertRaisesRegex( - self, TypeError, r"dtype mismatch: not isinstance\(2, str\)" + with self.assertRaisesRegex( + TypeError, r"dtype mismatch: not isinstance\(2, str\)" ): hp.Discrete(["one", 2]) diff --git a/tensorboard/plugins/image/summary_test.py b/tensorboard/plugins/image/summary_test.py index ac911b9fc8..5b7dd6e4bd 100644 --- a/tensorboard/plugins/image/summary_test.py +++ b/tensorboard/plugins/image/summary_test.py @@ -20,7 +20,6 @@ import os import numpy as np -import six import tensorflow as tf from tensorboard.compat import tf2 @@ -131,13 +130,13 @@ def test_image_count_more_than_max_outputs(self): def test_requires_nonnegative_max_outputs(self): data = np.array(1, np.uint8, ndmin=4) - with six.assertRaisesRegex( - self, (ValueError, tf.errors.InvalidArgumentError), ">= 0" + with self.assertRaisesRegex( + (ValueError, tf.errors.InvalidArgumentError), ">= 0" ): self.image("mona_lisa", 
data, max_outputs=-1) def test_requires_rank_4(self): - with six.assertRaisesRegex(self, ValueError, "must have rank 4"): + with self.assertRaisesRegex(ValueError, "must have rank 4"): self.image("mona_lisa", [[[1], [2]], [[3], [4]]]) diff --git a/tensorboard/plugins/mesh/metadata_test.py b/tensorboard/plugins/mesh/metadata_test.py index e850ba2799..eabbc8d9b1 100644 --- a/tensorboard/plugins/mesh/metadata_test.py +++ b/tensorboard/plugins/mesh/metadata_test.py @@ -17,7 +17,6 @@ from unittest import mock -import six import tensorflow as tf from tensorboard.plugins.mesh import metadata from tensorboard.plugins.mesh import plugin_data_pb2 @@ -90,17 +89,15 @@ def test_metadata_version(self): def test_tensor_shape(self): """Tests that target tensor should be of particular shape.""" - with six.assertRaisesRegex( - self, ValueError, r"Tensor shape should be of shape BxNx3.*" + with self.assertRaisesRegex( + ValueError, r"Tensor shape should be of shape BxNx3.*" ): self._create_metadata([1]) def test_metadata_format(self): """Tests that metadata content must be passed as a serialized string.""" - with six.assertRaisesRegex( - self, TypeError, r"Content type must be bytes." - ): + with self.assertRaisesRegex(TypeError, r"Content type must be bytes."): metadata.parse_plugin_metadata(123) def test_default_components(self): diff --git a/tensorboard/plugins/pr_curve/pr_curves_plugin_test.py b/tensorboard/plugins/pr_curve/pr_curves_plugin_test.py index a238c4fe22..9ae6652422 100644 --- a/tensorboard/plugins/pr_curve/pr_curves_plugin_test.py +++ b/tensorboard/plugins/pr_curve/pr_curves_plugin_test.py @@ -20,7 +20,6 @@ import os.path import numpy as np -import six import tensorflow as tf from tensorboard import context @@ -286,16 +285,12 @@ def testPrCurvesRaisesValueErrorWhenNoData(self): The handler should raise a ValueError when no PR curve data can be found for a certain run-tag combination. 
""" - with six.assertRaisesRegex( - self, ValueError, r"No PR curves could be found" - ): + with self.assertRaisesRegex(ValueError, r"No PR curves could be found"): self.plugin.pr_curves_impl( context.RequestContext(), "123", ["colors"], "non_existent_tag" ) - with six.assertRaisesRegex( - self, ValueError, r"No PR curves could be found" - ): + with self.assertRaisesRegex(ValueError, r"No PR curves could be found"): self.plugin.pr_curves_impl( context.RequestContext(), "123", diff --git a/tensorboard/plugins/projector/projector_api_test.py b/tensorboard/plugins/projector/projector_api_test.py index 94fe1d1f06..7518dbbc34 100644 --- a/tensorboard/plugins/projector/projector_api_test.py +++ b/tensorboard/plugins/projector/projector_api_test.py @@ -17,7 +17,6 @@ import os -import six import tensorflow as tf from google.protobuf import text_format @@ -73,8 +72,8 @@ def test_visualize_embeddings_with_file_writer(self): self.assertEqual(config, config2) def test_visualize_embeddings_no_logdir(self): - with six.assertRaisesRegex( - self, ValueError, "Expected logdir to be a path, but got None" + with self.assertRaisesRegex( + ValueError, "Expected logdir to be a path, but got None" ): projector.visualize_embeddings(None, create_dummy_config()) diff --git a/tensorboard/plugins/scalar/summary_test.py b/tensorboard/plugins/scalar/summary_test.py index 86a45f67c5..b24495e377 100644 --- a/tensorboard/plugins/scalar/summary_test.py +++ b/tensorboard/plugins/scalar/summary_test.py @@ -21,7 +21,6 @@ import numpy as np -import six import tensorflow as tf from tensorboard.compat import tf2 @@ -94,13 +93,13 @@ def test_bool_value(self): def test_string_value(self): # Use str.* in regex because PY3 numpy refers to string arrays using # length-dependent type names in the format "str%d" % (32 * len(str)). 
- with six.assertRaisesRegex( - self, (ValueError, tf.errors.UnimplementedError), r"Cast str.*float" + with self.assertRaisesRegex( + (ValueError, tf.errors.UnimplementedError), r"Cast str.*float" ): self.scalar("a", np.array("113")) def test_requires_rank_0(self): - with six.assertRaisesRegex(self, ValueError, r"Expected scalar shape"): + with self.assertRaisesRegex(ValueError, r"Expected scalar shape"): self.scalar("a", np.array([1, 1, 3])) diff --git a/tensorboard/plugins/text/summary_test.py b/tensorboard/plugins/text/summary_test.py index f369d6283c..1226132913 100644 --- a/tensorboard/plugins/text/summary_test.py +++ b/tensorboard/plugins/text/summary_test.py @@ -116,7 +116,7 @@ def test_np_array_unicode_value(self): self.assertIsInstance(value, six.binary_type) def test_non_string_value(self): - with six.assertRaisesRegex(self, TypeError, r"must be of type.*string"): + with self.assertRaisesRegex(TypeError, r"must be of type.*string"): self.text("la", np.array(range(42))) @@ -131,8 +131,8 @@ def test_tag(self): ) def test_non_string_value(self): - with six.assertRaisesRegex( - self, ValueError, r"Expected binary or unicode string, got 0" + with self.assertRaisesRegex( + ValueError, r"Expected binary or unicode string, got 0" ): self.text("la", np.array(range(42))) diff --git a/tensorboard/program_test.py b/tensorboard/program_test.py index 73399f058b..003235e559 100644 --- a/tensorboard/program_test.py +++ b/tensorboard/program_test.py @@ -48,7 +48,7 @@ def testPlugins_pluginLoader(self): def testPlugins_invalidType(self): plugin_instance = core_plugin.CorePlugin(base_plugin.TBContext()) - with six.assertRaisesRegex(self, TypeError, "CorePlugin"): + with self.assertRaisesRegex(TypeError, "CorePlugin"): tb = program.TensorBoard(plugins=[plugin_instance]) def testConfigure(self): @@ -58,9 +58,7 @@ def testConfigure(self): def testConfigure_unknownFlag(self): tb = program.TensorBoard(plugins=[core_plugin.CorePlugin]) - with six.assertRaisesRegex( - self, 
ValueError, "Unknown TensorBoard flag" - ): + with self.assertRaisesRegex(ValueError, "Unknown TensorBoard flag"): tb.configure(foo="bar") diff --git a/tensorboard/util/encoder_test.py b/tensorboard/util/encoder_test.py index a050a2db7d..42ed5848a3 100644 --- a/tensorboard/util/encoder_test.py +++ b/tensorboard/util/encoder_test.py @@ -14,7 +14,6 @@ import numpy as np -import six import tensorflow as tf from tensorboard.util import encoder @@ -37,11 +36,11 @@ def _check_png(self, data): self.assertGreater(len(data), 128) def test_invalid_non_numpy(self): - with six.assertRaisesRegex(self, ValueError, "must be a numpy array"): + with self.assertRaisesRegex(ValueError, "must be a numpy array"): self._encode(self._rgb.tolist()) def test_invalid_non_uint8(self): - with six.assertRaisesRegex(self, ValueError, "dtype must be uint8"): + with self.assertRaisesRegex(ValueError, "dtype must be uint8"): self._encode(self._rgb.astype(np.float32)) def test_encodes_png(self): From f9da27d7831abc25e57a61b63d6834e4cb381b84 Mon Sep 17 00:00:00 2001 From: William Chargin Date: Wed, 23 Dec 2020 15:09:31 -0800 Subject: [PATCH 2/9] py3: replace `six.iteritems`, et al. Summary: `six.iteritems(...) -> (...).items()`, and likewise for `itervalues` and `iterkeys`. Incantation (requires `vim-fugitive` for `Ggrep`): ``` vim +'let @q = "df.4x\"zdw%a.\z()\bbhhdsb" | let @w = "@q:noau w|cn\@w" | Ggrep 'six\.iter' | normal @w' +q && flake8 tensorboard/ --select=F401 | cut -d : -f 1 | xargs git sed '/import six/d' && black tensorboard ``` Test Plan: It suffices that all tests pass. 
wchargin-branch: py3-six-iter-helpers --- tensorboard/backend/application.py | 3 +-- .../backend/event_processing/data_ingester.py | 3 +-- .../backend/event_processing/data_provider.py | 6 +++--- .../backend/event_processing/data_provider_test.py | 3 +-- .../backend/event_processing/event_multiplexer.py | 7 +++---- tensorboard/backend/event_processing/io_wrapper.py | 3 +-- .../event_processing/plugin_event_accumulator.py | 3 +-- .../event_processing/plugin_event_multiplexer.py | 9 ++++----- tensorboard/compat/tensorflow_stub/flags.py | 3 +-- .../tensorboard_plugin_example/plugin.py | 3 +-- .../plugin.py | 5 +---- tensorboard/plugins/histogram/histograms_plugin.py | 4 ++-- tensorboard/plugins/hparams/backend_context.py | 5 ++--- tensorboard/plugins/hparams/hparams_util.py | 4 +--- tensorboard/plugins/hparams/list_session_groups.py | 4 ++-- tensorboard/plugins/hparams/summary.py | 2 +- tensorboard/plugins/hparams/summary_v2.py | 2 +- tensorboard/plugins/image/images_plugin.py | 5 ++--- tensorboard/plugins/mesh/mesh_plugin.py | 7 +++---- tensorboard/plugins/npmi/npmi_plugin.py | 13 ++++++------- tensorboard/plugins/pr_curve/pr_curves_plugin.py | 3 +-- tensorboard/plugins/scalar/scalars_plugin.py | 5 ++--- tensorboard/plugins/text/text_plugin.py | 3 +-- tensorboard/plugins/text_v2/text_v2_plugin.py | 3 +-- tensorboard/program.py | 2 +- tensorboard/program_test.py | 2 +- tensorboard/uploader/logdir_loader_test.py | 3 +-- tensorboard/uploader/uploader.py | 3 +-- tensorboard/util/tensor_util.py | 5 ++--- 29 files changed, 49 insertions(+), 74 deletions(-) diff --git a/tensorboard/backend/application.py b/tensorboard/backend/application.py index da4f9fc230..4c2e5e0b1f 100644 --- a/tensorboard/backend/application.py +++ b/tensorboard/backend/application.py @@ -27,7 +27,6 @@ import time import zipfile -import six from six.moves.urllib import ( parse as urlparse, ) # pylint: disable=wrong-import-order @@ -316,7 +315,7 @@ def __init__( # over a more general one (e.g., a 
catchall route `/*` should come last). self.prefix_routes = collections.OrderedDict( sorted( - six.iteritems(unordered_prefix_routes), + unordered_prefix_routes.items(), key=lambda x: len(x[0]), reverse=True, ) diff --git a/tensorboard/backend/event_processing/data_ingester.py b/tensorboard/backend/event_processing/data_ingester.py index d091448b2e..d3d201ffc3 100644 --- a/tensorboard/backend/event_processing/data_ingester.py +++ b/tensorboard/backend/event_processing/data_ingester.py @@ -19,7 +19,6 @@ import threading import time -import six from tensorboard.backend.event_processing import data_provider from tensorboard.backend.event_processing import plugin_event_multiplexer @@ -97,7 +96,7 @@ def _reload(): while True: start = time.time() logger.info("TensorBoard reload process beginning") - for path, name in six.iteritems(self._path_to_run): + for path, name in self._path_to_run.items(): self._multiplexer.AddRunsFromDirectory(path, name) logger.info( "TensorBoard reload process: Reload the whole Multiplexer" diff --git a/tensorboard/backend/event_processing/data_provider.py b/tensorboard/backend/event_processing/data_provider.py index 733027f170..754f897450 100644 --- a/tensorboard/backend/event_processing/data_provider.py +++ b/tensorboard/backend/event_processing/data_provider.py @@ -260,10 +260,10 @@ def _read(self, convert_event, index, downsample): suitable to be returned from `read_scalars` or `read_tensors`. 
""" result = {} - for (run, tags_for_run) in six.iteritems(index): + for (run, tags_for_run) in index.items(): result_for_run = {} result[run] = result_for_run - for (tag, metadata) in six.iteritems(tags_for_run): + for (tag, metadata) in tags_for_run.items(): events = self._multiplexer.Tensors(run, tag) data = [convert_event(e) for e in events] result_for_run[tag] = _downsample(data, downsample) @@ -319,7 +319,7 @@ def read_blob_sequences( plugin_name, run_tag_filter, summary_pb2.DATA_CLASS_BLOB_SEQUENCE ) result = {} - for (run, tags) in six.iteritems(index): + for (run, tags) in index.items(): result_for_run = {} result[run] = result_for_run for tag in tags: diff --git a/tensorboard/backend/event_processing/data_provider_test.py b/tensorboard/backend/event_processing/data_provider_test.py index f2c304e9ea..256227721a 100644 --- a/tensorboard/backend/event_processing/data_provider_test.py +++ b/tensorboard/backend/event_processing/data_provider_test.py @@ -17,7 +17,6 @@ import os -import six from six.moves import xrange # pylint: disable=redefined-builtin import numpy as np @@ -200,7 +199,7 @@ def FirstEventTimestamp(multiplexer, run): base_provider.Run( run_id=run, run_name=run, start_time=start_time ) - for (run, start_time) in six.iteritems(start_times) + for (run, start_time) in start_times.items() ], ) diff --git a/tensorboard/backend/event_processing/event_multiplexer.py b/tensorboard/backend/event_processing/event_multiplexer.py index 911bf362b3..c049602eab 100644 --- a/tensorboard/backend/event_processing/event_multiplexer.py +++ b/tensorboard/backend/event_processing/event_multiplexer.py @@ -18,7 +18,6 @@ import os import threading -import six from tensorboard.backend.event_processing import directory_watcher from tensorboard.backend.event_processing import event_accumulator @@ -96,7 +95,7 @@ def __init__( "Event Multplexer doing initialization load for %s", run_path_map, ) - for (run, path) in six.iteritems(run_path_map): + for (run, path) in 
run_path_map.items(): self.AddRun(path, run) logger.info("Event Multiplexer done initializing") @@ -218,7 +217,7 @@ def PluginAssets(self, plugin_name): """ with self._accumulators_mutex: # To avoid nested locks, we construct a copy of the run-accumulator map - items = list(six.iteritems(self._accumulators)) + items = list(self._accumulators.items()) return {run: accum.PluginAssets(plugin_name) for run, accum in items} @@ -485,7 +484,7 @@ def Runs(self): """ with self._accumulators_mutex: # To avoid nested locks, we construct a copy of the run-accumulator map - items = list(six.iteritems(self._accumulators)) + items = list(self._accumulators.items()) return {run_name: accumulator.Tags() for run_name, accumulator in items} def RunPaths(self): diff --git a/tensorboard/backend/event_processing/io_wrapper.py b/tensorboard/backend/event_processing/io_wrapper.py index 4e0d7661d9..6707ee57e4 100644 --- a/tensorboard/backend/event_processing/io_wrapper.py +++ b/tensorboard/backend/event_processing/io_wrapper.py @@ -18,7 +18,6 @@ import os import re -import six from tensorboard.compat import tf from tensorboard.util import tb_logging @@ -145,7 +144,7 @@ def ListRecursivelyViaGlobbing(top): pairs = collections.defaultdict(list) for file_path in glob: pairs[os.path.dirname(file_path)].append(file_path) - for dir_name, file_paths in six.iteritems(pairs): + for dir_name, file_paths in pairs.items(): yield (dir_name, tuple(file_paths)) if len(pairs) == 1: diff --git a/tensorboard/backend/event_processing/plugin_event_accumulator.py b/tensorboard/backend/event_processing/plugin_event_accumulator.py index 7dc096ccc0..afa2f4d420 100644 --- a/tensorboard/backend/event_processing/plugin_event_accumulator.py +++ b/tensorboard/backend/event_processing/plugin_event_accumulator.py @@ -17,7 +17,6 @@ import collections import threading -import six from tensorboard.backend.event_processing import directory_loader from tensorboard.backend.event_processing import directory_watcher @@ -595,7 
+594,7 @@ def _Purge(self, event, by_tags): _NotExpired, _TENSOR_RESERVOIR_KEY ) else: - for tag_reservoir in six.itervalues(self.tensors_by_tag): + for tag_reservoir in self.tensors_by_tag.values(): num_expired += tag_reservoir.FilterItems( _NotExpired, _TENSOR_RESERVOIR_KEY ) diff --git a/tensorboard/backend/event_processing/plugin_event_multiplexer.py b/tensorboard/backend/event_processing/plugin_event_multiplexer.py index e9ec0fc464..9d12f6f56c 100644 --- a/tensorboard/backend/event_processing/plugin_event_multiplexer.py +++ b/tensorboard/backend/event_processing/plugin_event_multiplexer.py @@ -18,7 +18,6 @@ import os import threading -import six from six.moves import queue, xrange # pylint: disable=redefined-builtin from tensorboard.backend.event_processing import directory_watcher @@ -117,7 +116,7 @@ def __init__( "Event Multplexer doing initialization load for %s", run_path_map, ) - for (run, path) in six.iteritems(run_path_map): + for (run, path) in run_path_map.items(): self.AddRun(path, run) logger.info("Event Multiplexer done initializing") @@ -274,7 +273,7 @@ def PluginAssets(self, plugin_name): """ with self._accumulators_mutex: # To avoid nested locks, we construct a copy of the run-accumulator map - items = list(six.iteritems(self._accumulators)) + items = list(self._accumulators.items()) return {run: accum.PluginAssets(plugin_name) for run, accum in items} @@ -462,7 +461,7 @@ def AllSummaryMetadata(self): """ with self._accumulators_mutex: # To avoid nested locks, we construct a copy of the run-accumulator map - items = list(six.iteritems(self._accumulators)) + items = list(self._accumulators.items()) return { run_name: accumulator.AllSummaryMetadata() for run_name, accumulator in items @@ -479,7 +478,7 @@ def Runs(self): """ with self._accumulators_mutex: # To avoid nested locks, we construct a copy of the run-accumulator map - items = list(six.iteritems(self._accumulators)) + items = list(self._accumulators.items()) return {run_name: 
accumulator.Tags() for run_name, accumulator in items} def RunPaths(self): diff --git a/tensorboard/compat/tensorflow_stub/flags.py b/tensorboard/compat/tensorflow_stub/flags.py index 7f7b1cffc0..a07ed8be90 100644 --- a/tensorboard/compat/tensorflow_stub/flags.py +++ b/tensorboard/compat/tensorflow_stub/flags.py @@ -23,7 +23,6 @@ # go/tf-wildcard-import from absl.flags import * # pylint: disable=wildcard-import -import six as _six # Since we wrap absl.flags DEFINE functions, we need to declare this module @@ -44,7 +43,7 @@ def _wrap_define_function(original_function): def wrapper(*args, **kwargs): """Wrapper function that turns old keyword names to new ones.""" has_old_names = False - for old_name, new_name in _six.iteritems(_RENAMED_ARGUMENTS): + for old_name, new_name in _RENAMED_ARGUMENTS.items(): if old_name in kwargs: has_old_names = True value = kwargs.pop(old_name) diff --git a/tensorboard/examples/plugins/example_basic/tensorboard_plugin_example/plugin.py b/tensorboard/examples/plugins/example_basic/tensorboard_plugin_example/plugin.py index ee9145f50c..1f2dfcfeab 100644 --- a/tensorboard/examples/plugins/example_basic/tensorboard_plugin_example/plugin.py +++ b/tensorboard/examples/plugins/example_basic/tensorboard_plugin_example/plugin.py @@ -18,7 +18,6 @@ import json import os -import six from tensorboard.plugins import base_plugin from tensorboard.util import tensor_util import werkzeug @@ -75,7 +74,7 @@ def _serve_tags(self, request): metadata.PLUGIN_NAME ) result = {run: {} for run in self._multiplexer.Runs()} - for (run, tag_to_content) in six.iteritems(mapping): + for (run, tag_to_content) in mapping.items(): for tag in tag_to_content: summary_metadata = self._multiplexer.SummaryMetadata(run, tag) result[run][tag] = { diff --git a/tensorboard/examples/plugins/example_raw_scalars/tensorboard_plugin_example_raw_scalars/plugin.py b/tensorboard/examples/plugins/example_raw_scalars/tensorboard_plugin_example_raw_scalars/plugin.py index 
f1028b1fa8..2ba9ebcfde 100644 --- a/tensorboard/examples/plugins/example_raw_scalars/tensorboard_plugin_example_raw_scalars/plugin.py +++ b/tensorboard/examples/plugins/example_raw_scalars/tensorboard_plugin_example_raw_scalars/plugin.py @@ -18,7 +18,6 @@ import mimetypes import os -import six from werkzeug import wrappers from tensorboard import errors @@ -62,9 +61,7 @@ def _serve_tags(self, request): run_tag_mapping = self._multiplexer.PluginRunToTagToContent( _SCALAR_PLUGIN_NAME ) - run_info = { - run: list(tags) for (run, tags) in six.iteritems(run_tag_mapping) - } + run_info = {run: list(tags) for (run, tags) in run_tag_mapping.items()} return http_util.Respond(request, run_info, "application/json") diff --git a/tensorboard/plugins/histogram/histograms_plugin.py b/tensorboard/plugins/histogram/histograms_plugin.py index 495dcdc78f..838af875e5 100644 --- a/tensorboard/plugins/histogram/histograms_plugin.py +++ b/tensorboard/plugins/histogram/histograms_plugin.py @@ -79,8 +79,8 @@ def index_impl(self, ctx, experiment): plugin_name=metadata.PLUGIN_NAME, ) result = {run: {} for run in mapping} - for (run, tag_to_content) in six.iteritems(mapping): - for (tag, metadatum) in six.iteritems(tag_to_content): + for (run, tag_to_content) in mapping.items(): + for (tag, metadatum) in tag_to_content.items(): description = plugin_util.markdown_to_safe_html( metadatum.description ) diff --git a/tensorboard/plugins/hparams/backend_context.py b/tensorboard/plugins/hparams/backend_context.py index 225c50f959..3a35e540c7 100644 --- a/tensorboard/plugins/hparams/backend_context.py +++ b/tensorboard/plugins/hparams/backend_context.py @@ -19,7 +19,6 @@ import collections import os -import six from tensorboard.plugins.hparams import api_pb2 from tensorboard.plugins.hparams import metadata @@ -211,13 +210,13 @@ def _compute_hparam_infos(self, hparams_run_to_tag_to_content): start_info = metadata.parse_session_start_info_plugin_data( tag_to_content[metadata.SESSION_START_INFO_TAG] ) - 
for (name, value) in six.iteritems(start_info.hparams): + for (name, value) in start_info.hparams.items(): hparams[name].append(value) # Try to construct an HParamInfo for each hparam from its name and list # of values. result = [] - for (name, values) in six.iteritems(hparams): + for (name, values) in hparams.items(): hparam_info = self._compute_hparam_info_from_values(name, values) if hparam_info is not None: result.append(hparam_info) diff --git a/tensorboard/plugins/hparams/hparams_util.py b/tensorboard/plugins/hparams/hparams_util.py index c22528b3da..7dd3a084c7 100644 --- a/tensorboard/plugins/hparams/hparams_util.py +++ b/tensorboard/plugins/hparams/hparams_util.py @@ -47,7 +47,6 @@ from absl import flags from google.protobuf import struct_pb2 from google.protobuf import text_format -import six import tensorflow as tf from tensorboard.plugins.hparams import api_pb2 @@ -186,8 +185,7 @@ def start_session(): # Convert hparams.hparams values from google.protobuf.Value to Python native # objects. hparams = { - key: value_to_python(value) - for (key, value) in six.iteritems(hparams.hparams) + key: value_to_python(value) for (key, value) in hparams.hparams.items() } write_summary( summary.session_start_pb( diff --git a/tensorboard/plugins/hparams/list_session_groups.py b/tensorboard/plugins/hparams/list_session_groups.py index 68fa950258..cb3a837d79 100644 --- a/tensorboard/plugins/hparams/list_session_groups.py +++ b/tensorboard/plugins/hparams/list_session_groups.py @@ -167,7 +167,7 @@ def _add_session(self, session, start_info, groups_by_name): # hyperparameter values) into result. # There doesn't seem to be a way to initialize a protobuffer map in the # constructor. 
- for (key, value) in six.iteritems(start_info.hparams): + for (key, value) in start_info.hparams.items(): group.hparams[key].CopyFrom(value) groups_by_name[group_name] = group @@ -627,7 +627,7 @@ def _set_avg_session_metrics(session_group): stats.total_wall_time_secs += metric_value.wall_time_secs del session_group.metric_values[:] - for (metric_name, stats) in six.iteritems(metric_stats): + for (metric_name, stats) in metric_stats.items(): session_group.metric_values.add( name=api_pb2.MetricName( group=metric_name.group, tag=metric_name.tag diff --git a/tensorboard/plugins/hparams/summary.py b/tensorboard/plugins/hparams/summary.py index a4398b7bb6..30329bda36 100644 --- a/tensorboard/plugins/hparams/summary.py +++ b/tensorboard/plugins/hparams/summary.py @@ -126,7 +126,7 @@ def session_start_pb( group_name=group_name, start_time_secs=start_time_secs, ) - for (hp_name, hp_val) in six.iteritems(hparams): + for (hp_name, hp_val) in hparams.items(): if isinstance(hp_val, (float, int)): session_start_info.hparams[hp_name].number_value = hp_val elif isinstance(hp_val, six.string_types): diff --git a/tensorboard/plugins/hparams/summary_v2.py b/tensorboard/plugins/hparams/summary_v2.py index 54734ce8f4..ed08c5ac46 100644 --- a/tensorboard/plugins/hparams/summary_v2.py +++ b/tensorboard/plugins/hparams/summary_v2.py @@ -197,7 +197,7 @@ def _normalize_hparams(hparams): hyperparameter name. 
""" result = {} - for (k, v) in six.iteritems(hparams): + for (k, v) in hparams.items(): if isinstance(k, HParam): k = k.name if k in result: diff --git a/tensorboard/plugins/image/images_plugin.py b/tensorboard/plugins/image/images_plugin.py index 1114bac621..b18726ed07 100644 --- a/tensorboard/plugins/image/images_plugin.py +++ b/tensorboard/plugins/image/images_plugin.py @@ -17,7 +17,6 @@ import imghdr -import six from six.moves import urllib from werkzeug import wrappers @@ -88,8 +87,8 @@ def _index_impl(self, ctx, experiment): plugin_name=metadata.PLUGIN_NAME, ) result = {run: {} for run in mapping} - for (run, tag_to_content) in six.iteritems(mapping): - for (tag, metadatum) in six.iteritems(tag_to_content): + for (run, tag_to_content) in mapping.items(): + for (tag, metadatum) in tag_to_content.items(): description = plugin_util.markdown_to_safe_html( metadatum.description ) diff --git a/tensorboard/plugins/mesh/mesh_plugin.py b/tensorboard/plugins/mesh/mesh_plugin.py index eb6309ef1c..34b4d0e609 100644 --- a/tensorboard/plugins/mesh/mesh_plugin.py +++ b/tensorboard/plugins/mesh/mesh_plugin.py @@ -15,7 +15,6 @@ """TensorBoard 3D mesh visualizer plugin.""" import numpy as np -import six from werkzeug import wrappers from tensorboard.backend import http_util @@ -57,7 +56,7 @@ def _instance_tags(self, run, tag): ) return [ instance_tag - for (instance_tag, content) in six.iteritems(index) + for (instance_tag, content) in index.items() if tag == metadata.parse_plugin_metadata(content).name ] @@ -84,9 +83,9 @@ def _serve_tags(self, request): # to obtain a list of tags associated with each run. For each tag estimate # number of samples. response = dict() - for run, tag_to_content in six.iteritems(all_runs): + for run, tag_to_content in all_runs.items(): response[run] = dict() - for instance_tag, _ in six.iteritems(tag_to_content): + for instance_tag, _ in tag_to_content.items(): # Make sure we only operate on user-defined tags here. 
tag = self._tag(run, instance_tag) meta = self._instance_tag_metadata(run, instance_tag) diff --git a/tensorboard/plugins/npmi/npmi_plugin.py b/tensorboard/plugins/npmi/npmi_plugin.py index 2d31399fd7..82b45ff1d2 100644 --- a/tensorboard/plugins/npmi/npmi_plugin.py +++ b/tensorboard/plugins/npmi/npmi_plugin.py @@ -15,7 +15,6 @@ """The nPMI visualization plugin.""" -import six import math from werkzeug import wrappers @@ -101,9 +100,9 @@ def tags_impl(self, ctx, experiment): ctx, experiment_id=experiment, plugin_name=self.plugin_name ) result = {run: {} for run in mapping} - for (run, tag_to_content) in six.iteritems(mapping): + for (run, tag_to_content) in mapping.items(): result[run] = [] - for (tag, metadatum) in six.iteritems(tag_to_content): + for (tag, metadatum) in tag_to_content.items(): content = metadata.parse_plugin_metadata( metadatum.plugin_content ) @@ -120,7 +119,7 @@ def annotations_impl(self, ctx, experiment): ), ) result = {run: {} for run in mapping} - for (run, _) in six.iteritems(mapping): + for (run, _) in mapping.items(): all_annotations = self._data_provider.read_tensors( ctx, experiment_id=experiment, @@ -148,7 +147,7 @@ def metrics_impl(self, ctx, experiment): run_tag_filter=provider.RunTagFilter(tags=[metadata.METRICS_TAG]), ) result = {run: {} for run in mapping} - for (run, _) in six.iteritems(mapping): + for (run, _) in mapping.items(): all_metrics = self._data_provider.read_tensors( ctx, experiment_id=experiment, @@ -171,7 +170,7 @@ def values_impl(self, ctx, experiment): run_tag_filter=provider.RunTagFilter(tags=[metadata.VALUES_TAG]), ) result = {run: {} for run in mapping} - for (run, _) in six.iteritems(mapping): + for (run, _) in mapping.items(): all_values = self._data_provider.read_tensors( ctx, experiment_id=experiment, @@ -197,7 +196,7 @@ def embeddings_impl(self, ctx, experiment): ), ) result = {run: {} for run in mapping} - for (run, _) in six.iteritems(mapping): + for (run, _) in mapping.items(): all_embeddings = 
self._data_provider.read_tensors( ctx, experiment_id=experiment, diff --git a/tensorboard/plugins/pr_curve/pr_curves_plugin.py b/tensorboard/plugins/pr_curve/pr_curves_plugin.py index 7d1e260b66..fe19da4792 100644 --- a/tensorboard/plugins/pr_curve/pr_curves_plugin.py +++ b/tensorboard/plugins/pr_curve/pr_curves_plugin.py @@ -14,7 +14,6 @@ import numpy as np -import six from werkzeug import wrappers from tensorboard import plugin_util @@ -144,7 +143,7 @@ def tags_impl(self, ctx, experiment): ctx, experiment_id=experiment, plugin_name=metadata.PLUGIN_NAME ) result = {run: {} for run in mapping} - for (run, tag_to_time_series) in six.iteritems(mapping): + for (run, tag_to_time_series) in mapping.items(): for (tag, time_series) in tag_to_time_series.items(): result[run][tag] = { "displayName": time_series.display_name, diff --git a/tensorboard/plugins/scalar/scalars_plugin.py b/tensorboard/plugins/scalar/scalars_plugin.py index 740d2c609d..969a3f28d9 100644 --- a/tensorboard/plugins/scalar/scalars_plugin.py +++ b/tensorboard/plugins/scalar/scalars_plugin.py @@ -21,7 +21,6 @@ import csv -import six from six import StringIO import werkzeug.exceptions from werkzeug import wrappers @@ -81,8 +80,8 @@ def index_impl(self, ctx, experiment=None): plugin_name=metadata.PLUGIN_NAME, ) result = {run: {} for run in mapping} - for (run, tag_to_content) in six.iteritems(mapping): - for (tag, metadatum) in six.iteritems(tag_to_content): + for (run, tag_to_content) in mapping.items(): + for (tag, metadatum) in tag_to_content.items(): description = plugin_util.markdown_to_safe_html( metadatum.description ) diff --git a/tensorboard/plugins/text/text_plugin.py b/tensorboard/plugins/text/text_plugin.py index 40dc42aecd..79818bffbc 100644 --- a/tensorboard/plugins/text/text_plugin.py +++ b/tensorboard/plugins/text/text_plugin.py @@ -23,7 +23,6 @@ # pylint: enable=g-bad-import-order -import six from werkzeug import wrappers from tensorboard import plugin_util @@ -225,7 +224,7 @@ def 
index_impl(self, ctx, experiment): ) return { run: list(tag_to_content) - for (run, tag_to_content) in six.iteritems(mapping) + for (run, tag_to_content) in mapping.items() } @wrappers.Request.application diff --git a/tensorboard/plugins/text_v2/text_v2_plugin.py b/tensorboard/plugins/text_v2/text_v2_plugin.py index 01113e58d1..c5cd68ff90 100644 --- a/tensorboard/plugins/text_v2/text_v2_plugin.py +++ b/tensorboard/plugins/text_v2/text_v2_plugin.py @@ -18,7 +18,6 @@ # Necessary for an internal test with special behavior for numpy. import numpy as np -import six from werkzeug import wrappers from tensorboard.plugins import base_plugin @@ -145,7 +144,7 @@ def index_impl(self, ctx, experiment): ) return { run: list(tag_to_content) - for (run, tag_to_content) in six.iteritems(mapping) + for (run, tag_to_content) in mapping.items() } def text_impl(self, ctx, run, tag, experiment): diff --git a/tensorboard/program.py b/tensorboard/program.py index 68b9b86969..af7193b86e 100644 --- a/tensorboard/program.py +++ b/tensorboard/program.py @@ -212,7 +212,7 @@ def configure(self, argv=("",), **kwargs): # any positional arguments to `serve`. 
serve_parser = serve_subparser - for (name, subcommand) in six.iteritems(self.subcommands): + for (name, subcommand) in self.subcommands.items(): subparser = subparsers.add_parser( name, help=subcommand.help(), diff --git a/tensorboard/program_test.py b/tensorboard/program_test.py index 003235e559..230f500d1b 100644 --- a/tensorboard/program_test.py +++ b/tensorboard/program_test.py @@ -76,7 +76,7 @@ def make_flags(self, **kwargs): flags = argparse.Namespace() kwargs.setdefault("host", None) kwargs.setdefault("bind_all", kwargs["host"] is None) - for k, v in six.iteritems(kwargs): + for k, v in kwargs.items(): setattr(flags, k, v) return flags diff --git a/tensorboard/uploader/logdir_loader_test.py b/tensorboard/uploader/logdir_loader_test.py index 27ef91d1a6..9e0f7498d6 100644 --- a/tensorboard/uploader/logdir_loader_test.py +++ b/tensorboard/uploader/logdir_loader_test.py @@ -17,7 +17,6 @@ import os.path import shutil -import six from tensorboard.uploader import logdir_loader from tensorboard import test as tb_test @@ -49,7 +48,7 @@ def _extract_tags(self, event_generator): def _extract_run_to_tags(self, run_to_events): """Returns run-to-tags dict from run-to-event-generator dict.""" run_to_tags = {} - for run_name, event_generator in six.iteritems(run_to_events): + for run_name, event_generator in run_to_events.items(): # There should be no duplicate runs. self.assertNotIn(run_name, run_to_tags) run_to_tags[run_name] = self._extract_tags(event_generator) diff --git a/tensorboard/uploader/uploader.py b/tensorboard/uploader/uploader.py index eaddd443ff..0c46d4d0ed 100644 --- a/tensorboard/uploader/uploader.py +++ b/tensorboard/uploader/uploader.py @@ -20,7 +20,6 @@ import time import grpc -import six from google.protobuf import message from tensorboard.compat.proto import graph_pb2 @@ -462,7 +461,7 @@ def _run_values(self, run_to_events): # stream contains runs with no events, or events with no values, we'll # lose that information. 
This is not a problem: we would need to prune # such data from the request anyway. - for (run_name, events) in six.iteritems(run_to_events): + for (run_name, events) in run_to_events.items(): for event in events: _filter_graph_defs(event) for value in event.summary.value: diff --git a/tensorboard/util/tensor_util.py b/tensorboard/util/tensor_util.py index 4fe220c366..9d5134b2d2 100644 --- a/tensorboard/util/tensor_util.py +++ b/tensorboard/util/tensor_util.py @@ -15,7 +15,6 @@ """Utilities to manipulate TensorProtos.""" import numpy as np -import six from tensorboard.compat.proto import tensor_pb2 from tensorboard.compat.tensorflow_stub import dtypes, compat, tensor_shape @@ -124,10 +123,10 @@ def SlowAppendBoolArrayToTensorProto(tensor_proto, proto_values): def GetFromNumpyDTypeDict(dtype_dict, dtype): # NOTE: dtype_dict.get(dtype) always returns None. - for key, val in six.iteritems(dtype_dict): + for key, val in dtype_dict.items(): if key == dtype: return val - for key, val in six.iteritems(BACKUP_DICT): + for key, val in BACKUP_DICT.items(): if key == dtype: return val return None From a95439a8bd01f40d4ef25c58eb5ec62dc38b9795 Mon Sep 17 00:00:00 2001 From: William Chargin Date: Wed, 23 Dec 2020 15:33:06 -0800 Subject: [PATCH 3/9] py3: replace `six.add_metaclass` Summary: `@six.add_metaclass(M) class C(object) -> class C(metaclass=M)`. Incantation (requires `vim-fugitive` for `Ggrep`): ``` vim +'let @q = "f(\"zdibdd0f(cibmetaclass=\z\" | let @w = "@q:noau w|cn\@w" | Ggrep '@six.add_metaclass' | normal @w' +q && flake8 tensorboard/ --select=F401 | cut -d : -f 1 | xargs git sed '/import six/d' && black tensorboard ``` Test Plan: It suffices that all tests pass. 
wchargin-branch: py3-six-add-metaclass --- tensorboard/auth.py | 5 +---- .../backend/event_processing/event_file_loader_test.py | 4 +--- tensorboard/data/provider.py | 3 +-- tensorboard/plugins/base_plugin.py | 5 +---- tensorboard/plugins/hparams/summary_v2.py | 3 +-- tensorboard/program.py | 7 ++----- tensorboard/uploader/uploader_subcommand.py | 3 +-- 7 files changed, 8 insertions(+), 22 deletions(-) diff --git a/tensorboard/auth.py b/tensorboard/auth.py index e01610b087..475c850906 100644 --- a/tensorboard/auth.py +++ b/tensorboard/auth.py @@ -16,11 +16,8 @@ import abc -import six - -@six.add_metaclass(abc.ABCMeta) -class AuthProvider(object): +class AuthProvider(metaclass=abc.ABCMeta): """Authentication provider for a specific kind of credential.""" def authenticate(self, environ): diff --git a/tensorboard/backend/event_processing/event_file_loader_test.py b/tensorboard/backend/event_processing/event_file_loader_test.py index 1e61d8eac5..7c1cbf686b 100644 --- a/tensorboard/backend/event_processing/event_file_loader_test.py +++ b/tensorboard/backend/event_processing/event_file_loader_test.py @@ -20,7 +20,6 @@ import io import os -import six import tensorflow as tf @@ -32,8 +31,7 @@ FILENAME = "test.events" -@six.add_metaclass(abc.ABCMeta) -class EventFileLoaderTestBase(object): +class EventFileLoaderTestBase(metaclass=abc.ABCMeta): def _append_record(self, data): with open(os.path.join(self.get_temp_dir(), FILENAME), "ab") as f: record_writer.RecordWriter(f).write(data) diff --git a/tensorboard/data/provider.py b/tensorboard/data/provider.py index 1f7eb5fb60..5e6dc3ca98 100644 --- a/tensorboard/data/provider.py +++ b/tensorboard/data/provider.py @@ -21,8 +21,7 @@ import numpy as np -@six.add_metaclass(abc.ABCMeta) -class DataProvider(object): +class DataProvider(metaclass=abc.ABCMeta): """Interface for reading TensorBoard scalar, tensor, and blob data. These APIs are under development and subject to change. 
For instance, diff --git a/tensorboard/plugins/base_plugin.py b/tensorboard/plugins/base_plugin.py index bcc88ff3e7..7e2dd6a69d 100644 --- a/tensorboard/plugins/base_plugin.py +++ b/tensorboard/plugins/base_plugin.py @@ -19,14 +19,11 @@ """ -import six - from abc import ABCMeta from abc import abstractmethod -@six.add_metaclass(ABCMeta) -class TBPlugin(object): +class TBPlugin(metaclass=ABCMeta): """TensorBoard plugin interface. Every plugin must extend from this class. diff --git a/tensorboard/plugins/hparams/summary_v2.py b/tensorboard/plugins/hparams/summary_v2.py index ed08c5ac46..64308e6072 100644 --- a/tensorboard/plugins/hparams/summary_v2.py +++ b/tensorboard/plugins/hparams/summary_v2.py @@ -335,8 +335,7 @@ def description(self): return self._description -@six.add_metaclass(abc.ABCMeta) -class Domain(object): +class Domain(metaclass=abc.ABCMeta): """The domain of a hyperparameter. Domains are restricted to values of the simple types `float`, `int`, diff --git a/tensorboard/program.py b/tensorboard/program.py index af7193b86e..73a369bbba 100644 --- a/tensorboard/program.py +++ b/tensorboard/program.py @@ -45,7 +45,6 @@ from absl import flags as absl_flags from absl.flags import argparse_flags import absl.logging -import six from six.moves import urllib from six.moves import xrange # pylint: disable=redefined-builtin from werkzeug import serving @@ -438,8 +437,7 @@ def _make_server(self): return self.server_class(app, self.flags) -@six.add_metaclass(ABCMeta) -class TensorBoardSubcommand(object): +class TensorBoardSubcommand(metaclass=ABCMeta): """Experimental private API for defining subcommands to tensorboard(1).""" @abstractmethod @@ -488,8 +486,7 @@ def description(self): return None -@six.add_metaclass(ABCMeta) -class TensorBoardServer(object): +class TensorBoardServer(metaclass=ABCMeta): """Class for customizing TensorBoard WSGI app serving.""" @abstractmethod diff --git a/tensorboard/uploader/uploader_subcommand.py 
b/tensorboard/uploader/uploader_subcommand.py index 0923ecd802..5040116da5 100644 --- a/tensorboard/uploader/uploader_subcommand.py +++ b/tensorboard/uploader/uploader_subcommand.py @@ -137,8 +137,7 @@ def _run(flags, experiment_url_callback=None): intent.execute(server_info, channel) -@six.add_metaclass(abc.ABCMeta) -class _Intent(object): +class _Intent(metaclass=abc.ABCMeta): """A description of the user's intent in invoking this program. Each valid set of CLI flags corresponds to one intent: e.g., "upload From 4f1ce745b17cd56fa135acc50691d92a318e81b2 Mon Sep 17 00:00:00 2001 From: William Chargin Date: Wed, 23 Dec 2020 15:46:55 -0800 Subject: [PATCH 4/9] py3: replace non-`xrange` `six.moves` Summary: We use `six.moves` mostly for `xrange`, but also `urllib`, and occasionally `queue` or `input`. Upgrading `urllib` is context-sensitive because `six.moves.urllib` always has all submodules, even though `urllib` may not: ```python >>> import urllib >>> urllib.request Traceback (most recent call last): File "<stdin>", line 1, in <module> AttributeError: module 'urllib' has no attribute 'request' >>> from six.moves import urllib >>> urllib.request ``` We make those moves here, and also clean up the rest of the long tail such that the remaining moves for `xrange` are all mechanical and can be fixed in a follow-up commit. Test Plan: It suffices that tests pass (but they really have to run; `flake8` does not suffice here).
Remaining: ``` $ git grep -h 'six.*moves' | sort -u from six.moves import xrange from six.moves import xrange # pylint: disable=redefined-builtin ``` wchargin-branch: py3-six-non-xrange-moves --- docs/image_summaries.ipynb | 1 - tensorboard/backend/application.py | 5 +---- .../backend/event_processing/data_provider.py | 2 +- .../event_processing/plugin_event_multiplexer.py | 3 ++- .../plugin_event_multiplexer_test.py | 13 +++++++------ tensorboard/lib_test.py | 4 ++-- tensorboard/manager_e2e_test.py | 2 +- tensorboard/plugins/audio/audio_plugin.py | 3 ++- tensorboard/plugins/audio/audio_plugin_test.py | 2 +- tensorboard/plugins/histogram/histograms_plugin.py | 3 +-- tensorboard/plugins/image/images_demo.py | 2 +- tensorboard/plugins/image/images_plugin.py | 2 +- tensorboard/plugins/image/images_plugin_test.py | 2 +- tensorboard/program.py | 2 +- tensorboard/summary/writer/event_file_writer.py | 7 +++---- tensorboard/uploader/uploader_subcommand.py | 3 +-- 16 files changed, 26 insertions(+), 30 deletions(-) diff --git a/docs/image_summaries.ipynb b/docs/image_summaries.ipynb index 266729b42c..cfcd323fb0 100644 --- a/docs/image_summaries.ipynb +++ b/docs/image_summaries.ipynb @@ -120,7 +120,6 @@ "import io\n", "import itertools\n", "from packaging import version\n", - "from six.moves import range\n", "\n", "import tensorflow as tf\n", "from tensorflow import keras\n", diff --git a/tensorboard/backend/application.py b/tensorboard/backend/application.py index 4c2e5e0b1f..99590b2ebd 100644 --- a/tensorboard/backend/application.py +++ b/tensorboard/backend/application.py @@ -25,12 +25,9 @@ import re import textwrap import time +from urllib import parse as urlparse import zipfile -from six.moves.urllib import ( - parse as urlparse, -) # pylint: disable=wrong-import-order - from werkzeug import wrappers from tensorboard import errors diff --git a/tensorboard/backend/event_processing/data_provider.py b/tensorboard/backend/event_processing/data_provider.py index 
754f897450..703ea83b6a 100644 --- a/tensorboard/backend/event_processing/data_provider.py +++ b/tensorboard/backend/event_processing/data_provider.py @@ -506,7 +506,7 @@ def _downsample(xs, k): return list(xs) if k == 0: return [] - indices = random.Random(0).sample(six.moves.xrange(len(xs) - 1), k - 1) + indices = random.Random(0).sample(range(len(xs) - 1), k - 1) indices.sort() indices += [len(xs) - 1] return [xs[i] for i in indices] diff --git a/tensorboard/backend/event_processing/plugin_event_multiplexer.py b/tensorboard/backend/event_processing/plugin_event_multiplexer.py index 9d12f6f56c..cd0a451575 100644 --- a/tensorboard/backend/event_processing/plugin_event_multiplexer.py +++ b/tensorboard/backend/event_processing/plugin_event_multiplexer.py @@ -16,9 +16,10 @@ import os +import queue import threading -from six.moves import queue, xrange # pylint: disable=redefined-builtin +from six.moves import xrange # pylint: disable=redefined-builtin from tensorboard.backend.event_processing import directory_watcher from tensorboard.backend.event_processing import ( diff --git a/tensorboard/backend/event_processing/plugin_event_multiplexer_test.py b/tensorboard/backend/event_processing/plugin_event_multiplexer_test.py index 3e70368d99..cf5769d1d8 100644 --- a/tensorboard/backend/event_processing/plugin_event_multiplexer_test.py +++ b/tensorboard/backend/event_processing/plugin_event_multiplexer_test.py @@ -16,6 +16,7 @@ import os import os.path +import queue import shutil import tensorflow as tf @@ -351,8 +352,8 @@ def testReloadWithMoreRunsThanThreads(self): ) start_mock = patcher.start() self.addCleanup(patcher.stop) - patcher = tf.compat.v1.test.mock.patch( - "six.moves.queue.Queue.join", autospec=True + patcher = tf.compat.v1.test.mock.patch.object( + queue.Queue, "join", autospec=True ) join_mock = patcher.start() self.addCleanup(patcher.stop) @@ -373,8 +374,8 @@ def testReloadWithMoreThreadsThanRuns(self): ) start_mock = patcher.start() 
self.addCleanup(patcher.stop) - patcher = tf.compat.v1.test.mock.patch( - "six.moves.queue.Queue.join", autospec=True + patcher = tf.compat.v1.test.mock.patch.object( + queue.Queue, "join", autospec=True ) join_mock = patcher.start() self.addCleanup(patcher.stop) @@ -396,8 +397,8 @@ def testReloadWith1Thread(self): ) start_mock = patcher.start() self.addCleanup(patcher.stop) - patcher = tf.compat.v1.test.mock.patch( - "six.moves.queue.Queue.join", autospec=True + patcher = tf.compat.v1.test.mock.patch.object( + queue.Queue, "join", autospec=True ) join_mock = patcher.start() self.addCleanup(patcher.stop) diff --git a/tensorboard/lib_test.py b/tensorboard/lib_test.py index 4756bfb097..b931bc78d8 100644 --- a/tensorboard/lib_test.py +++ b/tensorboard/lib_test.py @@ -13,7 +13,7 @@ # limitations under the License. -from six import moves +import importlib import sys import unittest @@ -28,7 +28,7 @@ def test_functional_after_reload(self): module_name: dir(getattr(tensorboard, module_name)) for module_name in submodules } - tensorboard = moves.reload_module(tensorboard) + tensorboard = importlib.reload(tensorboard) dirs_after = { module_name: dir(getattr(tensorboard, module_name)) for module_name in submodules diff --git a/tensorboard/manager_e2e_test.py b/tensorboard/manager_e2e_test.py index 3c3e7acb7d..e617f88a96 100644 --- a/tensorboard/manager_e2e_test.py +++ b/tensorboard/manager_e2e_test.py @@ -27,8 +27,8 @@ import tempfile import textwrap from unittest import mock +import urllib.request -from six.moves import urllib import tensorflow as tf from tensorboard import manager diff --git a/tensorboard/plugins/audio/audio_plugin.py b/tensorboard/plugins/audio/audio_plugin.py index b9b469da1a..9d134dce8f 100644 --- a/tensorboard/plugins/audio/audio_plugin.py +++ b/tensorboard/plugins/audio/audio_plugin.py @@ -15,7 +15,8 @@ """The TensorBoard Audio plugin.""" -from six.moves import urllib +import urllib.parse + from werkzeug import wrappers from tensorboard import errors 
diff --git a/tensorboard/plugins/audio/audio_plugin_test.py b/tensorboard/plugins/audio/audio_plugin_test.py index 3b5ad1fb59..4c948870da 100644 --- a/tensorboard/plugins/audio/audio_plugin_test.py +++ b/tensorboard/plugins/audio/audio_plugin_test.py @@ -20,9 +20,9 @@ import os import shutil import tempfile +import urllib.parse import numpy -from six.moves import urllib from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from werkzeug import test as werkzeug_test diff --git a/tensorboard/plugins/histogram/histograms_plugin.py b/tensorboard/plugins/histogram/histograms_plugin.py index 838af875e5..591414c089 100644 --- a/tensorboard/plugins/histogram/histograms_plugin.py +++ b/tensorboard/plugins/histogram/histograms_plugin.py @@ -21,7 +21,6 @@ import random -import six from werkzeug import wrappers from tensorboard import errors @@ -169,6 +168,6 @@ def _downsample(rng, xs, k): if k > len(xs): return list(xs) - indices = rng.sample(six.moves.xrange(len(xs)), k) + indices = rng.sample(range(len(xs)), k) indices.sort() return [xs[i] for i in indices] diff --git a/tensorboard/plugins/image/images_demo.py b/tensorboard/plugins/image/images_demo.py index eb42efdac1..faeaca4e71 100644 --- a/tensorboard/plugins/image/images_demo.py +++ b/tensorboard/plugins/image/images_demo.py @@ -20,8 +20,8 @@ import contextlib import os.path import textwrap +import urllib.request -from six.moves import urllib from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow.compat.v1 as tf diff --git a/tensorboard/plugins/image/images_plugin.py b/tensorboard/plugins/image/images_plugin.py index b18726ed07..678dc40084 100644 --- a/tensorboard/plugins/image/images_plugin.py +++ b/tensorboard/plugins/image/images_plugin.py @@ -16,8 +16,8 @@ import imghdr +import urllib.parse -from six.moves import urllib from werkzeug import wrappers from tensorboard import errors diff --git a/tensorboard/plugins/image/images_plugin_test.py 
b/tensorboard/plugins/image/images_plugin_test.py index 49f7d8913b..457288d44c 100644 --- a/tensorboard/plugins/image/images_plugin_test.py +++ b/tensorboard/plugins/image/images_plugin_test.py @@ -20,9 +20,9 @@ import os import shutil import tempfile +import urllib.parse import numpy -from six.moves import urllib from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from werkzeug import test as werkzeug_test diff --git a/tensorboard/program.py b/tensorboard/program.py index 73a369bbba..4670f40215 100644 --- a/tensorboard/program.py +++ b/tensorboard/program.py @@ -41,11 +41,11 @@ import sys import threading import time +import urllib.parse from absl import flags as absl_flags from absl.flags import argparse_flags import absl.logging -from six.moves import urllib from six.moves import xrange # pylint: disable=redefined-builtin from werkzeug import serving diff --git a/tensorboard/summary/writer/event_file_writer.py b/tensorboard/summary/writer/event_file_writer.py index 127b1f807e..1387b0526b 100644 --- a/tensorboard/summary/writer/event_file_writer.py +++ b/tensorboard/summary/writer/event_file_writer.py @@ -16,12 +16,11 @@ import os +import queue import socket import threading import time -import six - from tensorboard.compat import tf from tensorboard.compat.proto import event_pb2 from tensorboard.summary.writer.record_writer import RecordWriter @@ -153,7 +152,7 @@ def __init__(self, record_writer, max_queue_size=20, flush_secs=120): """ self._writer = record_writer self._closed = False - self._byte_queue = six.moves.queue.Queue(max_queue_size) + self._byte_queue = queue.Queue(max_queue_size) self._worker = _AsyncWriterThread( self._byte_queue, self._writer, flush_secs ) @@ -234,7 +233,7 @@ def run(self): return self._record_writer.write(data) self._has_pending_data = True - except six.moves.queue.Empty: + except queue.Empty: pass finally: if data: diff --git a/tensorboard/uploader/uploader_subcommand.py 
b/tensorboard/uploader/uploader_subcommand.py index 5040116da5..29e98d17c0 100644 --- a/tensorboard/uploader/uploader_subcommand.py +++ b/tensorboard/uploader/uploader_subcommand.py @@ -23,7 +23,6 @@ from absl import logging import grpc -import six from tensorboard.uploader.proto import experiment_pb2 from tensorboard.uploader.proto import export_service_pb2_grpc @@ -64,7 +63,7 @@ def _prompt_for_user_ack(intent): user_ack_message = "\n".join((header, body, _MESSAGE_TOS)) sys.stderr.write(user_ack_message) sys.stderr.write("\n") - response = six.moves.input("Continue? (yes/NO) ") + response = input("Continue? (yes/NO) ") if response.lower() not in ("y", "yes"): sys.exit(0) sys.stderr.write("\n") From 1a0abbcf3b5b98521969e75ba7866b76e24b4a6e Mon Sep 17 00:00:00 2001 From: William Chargin Date: Wed, 23 Dec 2020 15:53:48 -0800 Subject: [PATCH 5/9] py3: replace `six.moves.xrange` Summary: Generated with: ``` git sed '/six.*moves/d' && git sed s/xrange/range/ && black tensorboard ``` where [`git-sed`] does what it says on the tin. [`git-sed`]: https://gist.github.com/wchargin/ea868384294e26103b90ac42e0de82d9 Test Plan: It suffices that all tests pass. 
wchargin-branch: py3-six-moves-xrange --- .../event_processing/data_provider_test.py | 9 ++-- .../event_accumulator_test.py | 19 ++++---- .../plugin_event_accumulator_test.py | 17 ++++---- .../plugin_event_multiplexer.py | 3 +- .../backend/event_processing/reservoir.py | 6 +-- .../event_processing/reservoir_test.py | 43 +++++++++---------- tensorboard/encode_png_benchmark.py | 5 +-- tensorboard/plugins/audio/audio_demo.py | 3 +- .../plugins/audio/audio_plugin_test.py | 7 ++- .../custom_scalar/custom_scalar_demo.py | 3 +- .../distribution/distributions_plugin_test.py | 5 +-- .../plugins/histogram/histograms_demo.py | 3 +- .../histogram/histograms_plugin_test.py | 3 +- tensorboard/plugins/hparams/hparams_demo.py | 9 ++-- .../plugins/hparams/hparams_minimal_demo.py | 7 ++- tensorboard/plugins/image/images_demo.py | 7 ++- .../plugins/image/images_plugin_test.py | 5 +-- tensorboard/plugins/pr_curve/README.md | 6 +-- tensorboard/plugins/pr_curve/pr_curve_demo.py | 3 +- tensorboard/plugins/scalar/scalars_demo.py | 3 +- .../plugins/scalar/scalars_plugin_test.py | 3 +- tensorboard/program.py | 3 +- tensorboard/scripts/generate_testdata.py | 11 +++-- 23 files changed, 80 insertions(+), 103 deletions(-) diff --git a/tensorboard/backend/event_processing/data_provider_test.py b/tensorboard/backend/event_processing/data_provider_test.py index 256227721a..5f70d3bae5 100644 --- a/tensorboard/backend/event_processing/data_provider_test.py +++ b/tensorboard/backend/event_processing/data_provider_test.py @@ -17,7 +17,6 @@ import os -from six.moves import xrange # pylint: disable=redefined-builtin import numpy as np from tensorboard import context @@ -50,7 +49,7 @@ def setUp(self): logdir = os.path.join(self.logdir, "polynomials") with tf.summary.create_file_writer(logdir).as_default(): - for i in xrange(10): + for i in range(10): scalar_summary.scalar( "square", i ** 2, step=2 * i, description="boxen" ) @@ -58,7 +57,7 @@ def setUp(self): logdir = os.path.join(self.logdir, "waves") 
with tf.summary.create_file_writer(logdir).as_default(): - for i in xrange(10): + for i in range(10): scalar_summary.scalar("sine", tf.sin(float(i)), step=i) scalar_summary.scalar( "square", tf.sign(tf.sin(float(i))), step=i @@ -86,7 +85,7 @@ def setUp(self): ] for (description, distribution, name) in data: tensor = tf.constant([distribution], dtype=tf.float64) - for i in xrange(1, 11): + for i in range(1, 11): histogram_summary.histogram( name, tensor * i, step=i, description=description ) @@ -100,7 +99,7 @@ def setUp(self): ] for (name, color, description) in data: image_1x1 = tf.constant([[[color]]], dtype=tf.uint8) - for i in xrange(1, 11): + for i in range(1, 11): # Use a non-monotonic sequence of sample sizes to # test `max_length` calculation. k = 6 - abs(6 - i) # 1, .., 6, .., 2 diff --git a/tensorboard/backend/event_processing/event_accumulator_test.py b/tensorboard/backend/event_processing/event_accumulator_test.py index 31b922b70a..0acfc57313 100644 --- a/tensorboard/backend/event_processing/event_accumulator_test.py +++ b/tensorboard/backend/event_processing/event_accumulator_test.py @@ -18,7 +18,6 @@ import numpy as np import six -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from tensorboard.backend.event_processing import event_accumulator as ea @@ -727,18 +726,16 @@ def testTFSummaryScalar(self): tf.compat.v1.summary.scalar("scalar2", ipt * ipt) merged = tf.compat.v1.summary.merge_all() writer.add_graph(sess.graph) - for i in xrange(10): + for i in range(10): summ = sess.run(merged, feed_dict={ipt: i}) writer.add_summary(summ, global_step=i) accumulator = ea.EventAccumulator(event_sink) accumulator.Reload() - seq1 = [ - ea.ScalarEvent(wall_time=0, step=i, value=i) for i in xrange(10) - ] + seq1 = [ea.ScalarEvent(wall_time=0, step=i, value=i) for i in range(10)] seq2 = [ - ea.ScalarEvent(wall_time=0, step=i, value=i * i) for i in xrange(10) + ea.ScalarEvent(wall_time=0, step=i, value=i * i) for i in 
range(10) ] self.assertTagsEqual( @@ -774,7 +771,7 @@ def testTFSummaryImage(self): tf.compat.v1.summary.image("images", ipt, max_outputs=3) merged = tf.compat.v1.summary.merge_all() writer.add_graph(sess.graph) - for i in xrange(10): + for i in range(10): summ = sess.run(merged) writer.add_summary(summ, global_step=i) @@ -869,7 +866,7 @@ def FakeScalarSummary(tag, value): writer.add_run_metadata(run_metadata, "test run") # Write a bunch of events using the writer. - for i in xrange(30): + for i in range(30): summ_id = FakeScalarSummary("id", i) summ_sq = FakeScalarSummary("sq", i * i) writer.add_summary(summ_id, i * 5) @@ -892,14 +889,14 @@ def FakeScalarSummary(tag, value): sq_events = acc.Scalars("sq") self.assertEqual(30, len(id_events)) self.assertEqual(30, len(sq_events)) - for i in xrange(30): + for i in range(30): self.assertEqual(i * 5, id_events[i].step) self.assertEqual(i * 5, sq_events[i].step) self.assertEqual(i, id_events[i].value) self.assertEqual(i * i, sq_events[i].value) # Write a few more events to test incremental reloading - for i in xrange(30, 40): + for i in range(30, 40): summ_id = FakeScalarSummary("id", i) summ_sq = FakeScalarSummary("sq", i * i) writer.add_summary(summ_id, i * 5) @@ -912,7 +909,7 @@ def FakeScalarSummary(tag, value): sq_events = acc.Scalars("sq") self.assertEqual(40, len(id_events)) self.assertEqual(40, len(sq_events)) - for i in xrange(40): + for i in range(40): self.assertEqual(i * 5, id_events[i].step) self.assertEqual(i * 5, sq_events[i].step) self.assertEqual(i, id_events[i].value) diff --git a/tensorboard/backend/event_processing/plugin_event_accumulator_test.py b/tensorboard/backend/event_processing/plugin_event_accumulator_test.py index 913ef62ed6..a3c40bcc5a 100644 --- a/tensorboard/backend/event_processing/plugin_event_accumulator_test.py +++ b/tensorboard/backend/event_processing/plugin_event_accumulator_test.py @@ -18,7 +18,6 @@ import numpy as np import six -from six.moves import xrange # pylint: 
disable=redefined-builtin import tensorflow as tf from tensorboard import data_compat @@ -399,7 +398,7 @@ def testNewStyleScalarSummary(self): scalar_summary.op("xent", 1.0 / (step + tf.constant(1.0))) merged = tf.compat.v1.summary.merge_all() writer.add_graph(sess.graph) - for i in xrange(10): + for i in range(10): summ = sess.run(merged, feed_dict={step: float(i)}) writer.add_summary(summ, global_step=i) @@ -447,7 +446,7 @@ def testNewStyleAudioSummary(self): ) merged = tf.compat.v1.summary.merge_all() writer.add_graph(sess.graph) - for i in xrange(10): + for i in range(10): summ = sess.run(merged) writer.add_summary(summ, global_step=i) @@ -494,7 +493,7 @@ def testNewStyleImageSummary(self): image_summary.op("images", ipt, max_outputs=3) merged = tf.compat.v1.summary.merge_all() writer.add_graph(sess.graph) - for i in xrange(10): + for i in range(10): summ = sess.run(merged) writer.add_summary(summ, global_step=i) @@ -578,7 +577,7 @@ def _testTFSummaryTensor_SizeGuidance( summary_metadata=summary_metadata, ) merged = tf.compat.v1.summary.merge_all() - for step in xrange(steps): + for step in range(steps): writer.add_summary(sess.run(merged), global_step=step) accumulator = ea.EventAccumulator( @@ -666,7 +665,7 @@ def FakeScalarSummary(tag, value): writer.add_run_metadata(run_metadata, "test run") # Write a bunch of events using the writer. 
- for i in xrange(30): + for i in range(30): summ_id = FakeScalarSummary("id", i) summ_sq = FakeScalarSummary("sq", i * i) writer.add_summary(summ_id, i * 5) @@ -694,7 +693,7 @@ def FakeScalarSummary(tag, value): sq_events = acc.Tensors("sq") self.assertEqual(30, len(id_events)) self.assertEqual(30, len(sq_events)) - for i in xrange(30): + for i in range(30): self.assertEqual(i * 5, id_events[i].step) self.assertEqual(i * 5, sq_events[i].step) self.assertEqual( @@ -706,7 +705,7 @@ def FakeScalarSummary(tag, value): ) # Write a few more events to test incremental reloading - for i in xrange(30, 40): + for i in range(30, 40): summ_id = FakeScalarSummary("id", i) summ_sq = FakeScalarSummary("sq", i * i) writer.add_summary(summ_id, i * 5) @@ -719,7 +718,7 @@ def FakeScalarSummary(tag, value): sq_events = acc.Tensors("sq") self.assertEqual(40, len(id_events)) self.assertEqual(40, len(sq_events)) - for i in xrange(40): + for i in range(40): self.assertEqual(i * 5, id_events[i].step) self.assertEqual(i * 5, sq_events[i].step) self.assertEqual( diff --git a/tensorboard/backend/event_processing/plugin_event_multiplexer.py b/tensorboard/backend/event_processing/plugin_event_multiplexer.py index cd0a451575..891985cafa 100644 --- a/tensorboard/backend/event_processing/plugin_event_multiplexer.py +++ b/tensorboard/backend/event_processing/plugin_event_multiplexer.py @@ -19,7 +19,6 @@ import queue import threading -from six.moves import xrange # pylint: disable=redefined-builtin from tensorboard.backend.event_processing import directory_watcher from tensorboard.backend.event_processing import ( @@ -243,7 +242,7 @@ def Worker(): if self._max_reload_threads > 1: num_threads = min(self._max_reload_threads, len(items)) logger.info("Starting %d threads to reload runs", num_threads) - for i in xrange(num_threads): + for i in range(num_threads): thread = threading.Thread(target=Worker, name="Reloader %d" % i) thread.daemon = True thread.start() diff --git 
a/tensorboard/backend/event_processing/reservoir.py b/tensorboard/backend/event_processing/reservoir.py index 2c9be75cdf..dfba4c37ff 100644 --- a/tensorboard/backend/event_processing/reservoir.py +++ b/tensorboard/backend/event_processing/reservoir.py @@ -36,11 +36,11 @@ class Reservoir(object): >>> separate_reservoir = reservoir.Reservoir(10) >>> interleaved_reservoir = reservoir.Reservoir(10) - >>> for i in xrange(100): + >>> for i in range(100): >>> separate_reservoir.AddItem('key1', i) - >>> for i in xrange(100): + >>> for i in range(100): >>> separate_reservoir.AddItem('key2', i) - >>> for i in xrange(100): + >>> for i in range(100): >>> interleaved_reservoir.AddItem('key1', i) >>> interleaved_reservoir.AddItem('key2', i) diff --git a/tensorboard/backend/event_processing/reservoir_test.py b/tensorboard/backend/event_processing/reservoir_test.py index 221c862c56..0db2a41625 100644 --- a/tensorboard/backend/event_processing/reservoir_test.py +++ b/tensorboard/backend/event_processing/reservoir_test.py @@ -14,7 +14,6 @@ # ============================================================================== -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from tensorboard.backend.event_processing import reservoir @@ -53,7 +52,7 @@ def testDeterminism(self): key = "key" r1 = reservoir.Reservoir(10) r2 = reservoir.Reservoir(10) - for i in xrange(100): + for i in range(100): r1.AddItem("key", i) r2.AddItem("key", i) @@ -67,11 +66,11 @@ def testBucketDeterminism(self): """ separate_reservoir = reservoir.Reservoir(10) interleaved_reservoir = reservoir.Reservoir(10) - for i in xrange(100): + for i in range(100): separate_reservoir.AddItem("key1", i) - for i in xrange(100): + for i in range(100): separate_reservoir.AddItem("key2", i) - for i in xrange(100): + for i in range(100): interleaved_reservoir.AddItem("key1", i) interleaved_reservoir.AddItem("key2", i) @@ -86,14 +85,14 @@ def testUsesSeed(self): key = "key" r1 = 
reservoir.Reservoir(10, seed=0) r2 = reservoir.Reservoir(10, seed=1) - for i in xrange(100): + for i in range(100): r1.AddItem("key", i) r2.AddItem("key", i) self.assertNotEqual(r1.Items(key), r2.Items(key)) def testFilterItemsByKey(self): r = reservoir.Reservoir(100, seed=0) - for i in xrange(10): + for i in range(10): r.AddItem("key1", i) r.AddItem("key2", i) @@ -116,21 +115,21 @@ def testEmptyBucket(self): def testFillToSize(self): b = reservoir._ReservoirBucket(100) - for i in xrange(100): + for i in range(100): b.AddItem(i) - self.assertEqual(b.Items(), list(xrange(100))) + self.assertEqual(b.Items(), list(range(100))) self.assertEqual(b._num_items_seen, 100) def testDoesntOverfill(self): b = reservoir._ReservoirBucket(10) - for i in xrange(1000): + for i in range(1000): b.AddItem(i) self.assertEqual(len(b.Items()), 10) self.assertEqual(b._num_items_seen, 1000) def testMaintainsOrder(self): b = reservoir._ReservoirBucket(100) - for i in xrange(10000): + for i in range(10000): b.AddItem(i) items = b.Items() prev = -1 @@ -140,21 +139,21 @@ def testMaintainsOrder(self): def testKeepsLatestItem(self): b = reservoir._ReservoirBucket(5) - for i in xrange(100): + for i in range(100): b.AddItem(i) last = b.Items()[-1] self.assertEqual(last, i) def testSizeOneBucket(self): b = reservoir._ReservoirBucket(1) - for i in xrange(20): + for i in range(20): b.AddItem(i) self.assertEqual(b.Items(), [i]) self.assertEqual(b._num_items_seen, 20) def testSizeZeroBucket(self): b = reservoir._ReservoirBucket(0) - for i in xrange(20): + for i in range(20): b.AddItem(i) self.assertEqual(b.Items(), list(range(i + 1))) self.assertEqual(b._num_items_seen, 20) @@ -167,7 +166,7 @@ def testSizeRequirement(self): def testRemovesItems(self): b = reservoir._ReservoirBucket(100) - for i in xrange(10): + for i in range(10): b.AddItem(i) self.assertEqual(len(b.Items()), 10) self.assertEqual(b._num_items_seen, 10) @@ -177,7 +176,7 @@ def testRemovesItems(self): def 
testRemovesItemsWhenItemsAreReplaced(self): b = reservoir._ReservoirBucket(100) - for i in xrange(10000): + for i in range(10000): b.AddItem(i) self.assertEqual(b._num_items_seen, 10000) @@ -212,20 +211,20 @@ def increment_and_double(self, x): 100, FakeRandom(), always_keep_last=False ) incrementer = Incrementer() - for i in xrange(1000): + for i in range(1000): b.AddItem(i, incrementer.increment_and_double) self.assertEqual(incrementer.n, 100) - self.assertEqual(b.Items(), [x * 2 for x in xrange(100)]) + self.assertEqual(b.Items(), [x * 2 for x in range(100)]) # This time, we will always keep the last item, meaning that the function # should get invoked once for every item we add. b = reservoir._ReservoirBucket(100, FakeRandom(), always_keep_last=True) incrementer = Incrementer() - for i in xrange(1000): + for i in range(1000): b.AddItem(i, incrementer.increment_and_double) self.assertEqual(incrementer.n, 1000) - self.assertEqual(b.Items(), [x * 2 for x in xrange(99)] + [999 * 2]) + self.assertEqual(b.Items(), [x * 2 for x in range(99)] + [999 * 2]) class ReservoirBucketStatisticalDistributionTest(tf.test.TestCase): @@ -263,7 +262,7 @@ def testBucketReservoirSamplingViaStatisticalProperties(self): b = reservoir._ReservoirBucket(_max_size=self.samples) # add one extra item because we always keep the most recent item, which # would skew the distribution; we can just slice it off the end instead. 
- for i in xrange(self.total + 1): + for i in range(self.total + 1): b.AddItem(i) divbins = [0] * self.n_buckets @@ -273,7 +272,7 @@ def testBucketReservoirSamplingViaStatisticalProperties(self): divbins[item // self.total_per_bucket] += 1 modbins[item % self.n_buckets] += 1 - for bucket_index in xrange(self.n_buckets): + for bucket_index in range(self.n_buckets): divbin = divbins[bucket_index] modbin = modbins[bucket_index] self.AssertBinomialQuantity(divbin) diff --git a/tensorboard/encode_png_benchmark.py b/tensorboard/encode_png_benchmark.py index cae79a1228..9c7d1d96c4 100644 --- a/tensorboard/encode_png_benchmark.py +++ b/tensorboard/encode_png_benchmark.py @@ -46,7 +46,6 @@ import threading import time -from six.moves import xrange from absl import app from absl import logging @@ -68,7 +67,7 @@ def bench(image, thread_count): """ threads = [ threading.Thread(target=lambda: encoder.encode_png(image)) - for _ in xrange(thread_count) + for _ in range(thread_count) ] start_time = datetime.datetime.now() for thread in threads: @@ -129,7 +128,7 @@ def main(unused_argv): for thread_count in thread_counts: time.sleep(1.0) total_time = min( - bench(image, thread_count) for _ in xrange(3) + bench(image, thread_count) for _ in range(3) ) # best-of-three timing unit_time = total_time / thread_count if total_time < 2.0: diff --git a/tensorboard/plugins/audio/audio_demo.py b/tensorboard/plugins/audio/audio_demo.py index 82fea9d58e..7aee4c9362 100644 --- a/tensorboard/plugins/audio/audio_demo.py +++ b/tensorboard/plugins/audio/audio_demo.py @@ -21,7 +21,6 @@ from absl import app from absl import flags -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from tensorboard.plugins.audio import summary @@ -145,7 +144,7 @@ def run(logdir, run_name, wave_name, wave_constructor): writer = tf.summary.FileWriter(os.path.join(logdir, run_name)) writer.add_graph(sess.graph) sess.run(tf.compat.v1.global_variables_initializer()) - for step in 
xrange(FLAGS.steps): + for step in range(FLAGS.steps): s = sess.run(summ, feed_dict={step_placeholder: float(step)}) writer.add_summary(s, global_step=step) writer.close() diff --git a/tensorboard/plugins/audio/audio_plugin_test.py b/tensorboard/plugins/audio/audio_plugin_test.py index 4c948870da..79743188a5 100644 --- a/tensorboard/plugins/audio/audio_plugin_test.py +++ b/tensorboard/plugins/audio/audio_plugin_test.py @@ -23,7 +23,6 @@ import urllib.parse import numpy -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from werkzeug import test as werkzeug_test from werkzeug import wrappers @@ -59,7 +58,7 @@ def setUp(self): foo_directory = os.path.join(self.log_dir, "foo") with test_util.FileWriterCache.get(foo_directory) as writer: writer.add_graph(sess.graph) - for step in xrange(2): + for step in range(2): # The floats (sample data) range from -1 to 1. writer.add_summary( sess.run( @@ -89,7 +88,7 @@ def setUp(self): bar_directory = os.path.join(self.log_dir, "bar") with test_util.FileWriterCache.get(bar_directory) as writer: writer.add_graph(sess.graph) - for step in xrange(2): + for step in range(2): # The floats (sample data) range from -1 to 1. 
writer.add_summary( sess.run( @@ -105,7 +104,7 @@ def setUp(self): "step **%s**, sample %s" % (step, sample) ) - for sample in xrange(42) + for sample in range(42) ], }, ), diff --git a/tensorboard/plugins/custom_scalar/custom_scalar_demo.py b/tensorboard/plugins/custom_scalar/custom_scalar_demo.py index fca0b74897..b810ffff3f 100644 --- a/tensorboard/plugins/custom_scalar/custom_scalar_demo.py +++ b/tensorboard/plugins/custom_scalar/custom_scalar_demo.py @@ -20,7 +20,6 @@ from absl import app -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from tensorboard.summary import v1 as summary_lib @@ -114,7 +113,7 @@ def run(): ) writer.add_summary(layout_summary) - for i in xrange(42): + for i in range(42): summary = sess.run(merged_summary, feed_dict={step: i}) writer.add_summary(summary, global_step=i) diff --git a/tensorboard/plugins/distribution/distributions_plugin_test.py b/tensorboard/plugins/distribution/distributions_plugin_test.py index 8911d3d007..da330d19ae 100644 --- a/tensorboard/plugins/distribution/distributions_plugin_test.py +++ b/tensorboard/plugins/distribution/distributions_plugin_test.py @@ -19,7 +19,6 @@ import collections.abc import os.path -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from tensorboard import errors @@ -102,7 +101,7 @@ def generate_run(self, logdir, run_name): subdir = os.path.join(logdir, run_name) with test_util.FileWriterCache.get(subdir) as writer: writer.add_graph(sess.graph) - for step in xrange(self._STEPS): + for step in range(self._STEPS): feed_dict = {placeholder: [1 + step, 2 + step, 3 + step]} s = sess.run(summ, feed_dict=feed_dict) writer.add_summary(s, global_step=step) @@ -158,7 +157,7 @@ def _test_distributions(self, run_name, tag_name, should_work=True): ) self.assertEqual("application/json", mime_type) self.assertEqual(len(data), self._STEPS) - for i in xrange(self._STEPS): + for i in range(self._STEPS): [_unused_wall_time, step, 
bps_and_icdfs] = data[i] self.assertEqual(i, step) (bps, _unused_icdfs) = zip(*bps_and_icdfs) diff --git a/tensorboard/plugins/histogram/histograms_demo.py b/tensorboard/plugins/histogram/histograms_demo.py index 4d690cfaf1..29999b5040 100644 --- a/tensorboard/plugins/histogram/histograms_demo.py +++ b/tensorboard/plugins/histogram/histograms_demo.py @@ -17,7 +17,6 @@ from absl import app -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from tensorboard.plugins.histogram import summary as histogram_summary @@ -116,7 +115,7 @@ def run_all(logdir, verbose=False, num_summaries=400): # Setup a loop and write the summaries to disk N = num_summaries - for step in xrange(N): + for step in range(N): k_val = step / float(N) summ = sess.run(summaries, feed_dict={k: k_val}) writer.add_summary(summ, global_step=step) diff --git a/tensorboard/plugins/histogram/histograms_plugin_test.py b/tensorboard/plugins/histogram/histograms_plugin_test.py index 77ee88727e..53f4e2e5e3 100644 --- a/tensorboard/plugins/histogram/histograms_plugin_test.py +++ b/tensorboard/plugins/histogram/histograms_plugin_test.py @@ -19,7 +19,6 @@ import collections.abc import os.path -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from tensorboard import errors @@ -104,7 +103,7 @@ def generate_run(self, logdir, run_name): subdir = os.path.join(logdir, run_name) with test_util.FileWriterCache.get(subdir) as writer: writer.add_graph(sess.graph) - for step in xrange(self._STEPS): + for step in range(self._STEPS): feed_dict = {placeholder: [1 + step, 2 + step, 3 + step]} s = sess.run(summ, feed_dict=feed_dict) writer.add_summary(s, global_step=step) diff --git a/tensorboard/plugins/hparams/hparams_demo.py b/tensorboard/plugins/hparams/hparams_demo.py index 3a7b5aebc2..aec170e531 100644 --- a/tensorboard/plugins/hparams/hparams_demo.py +++ b/tensorboard/plugins/hparams/hparams_demo.py @@ -27,7 +27,6 @@ from absl import app from 
absl import flags import numpy as np -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from tensorboard.plugins.hparams import api as hp @@ -125,7 +124,7 @@ def model_fn(hparams, seed): # Add convolutional layers. conv_filters = 8 - for _ in xrange(hparams[HP_CONV_LAYERS]): + for _ in range(hparams[HP_CONV_LAYERS]): model.add( tf.keras.layers.Conv2D( filters=conv_filters, @@ -142,7 +141,7 @@ def model_fn(hparams, seed): # Add fully connected layers. dense_neurons = 32 - for _ in xrange(hparams[HP_DENSE_LAYERS]): + for _ in range(hparams[HP_DENSE_LAYERS]): model.add(tf.keras.layers.Dense(dense_neurons, activation="relu")) dense_neurons *= 2 @@ -215,10 +214,10 @@ def run_all(logdir, verbose=False): sessions_per_group = 2 num_sessions = flags.FLAGS.num_session_groups * sessions_per_group session_index = 0 # across all session groups - for group_index in xrange(flags.FLAGS.num_session_groups): + for group_index in range(flags.FLAGS.num_session_groups): hparams = {h: h.domain.sample_uniform(rng) for h in HPARAMS} hparams_string = str(hparams) - for repeat_index in xrange(sessions_per_group): + for repeat_index in range(sessions_per_group): session_id = str(session_index) session_index += 1 if verbose: diff --git a/tensorboard/plugins/hparams/hparams_minimal_demo.py b/tensorboard/plugins/hparams/hparams_minimal_demo.py index dbc196b2c7..897d7f378f 100644 --- a/tensorboard/plugins/hparams/hparams_minimal_demo.py +++ b/tensorboard/plugins/hparams/hparams_minimal_demo.py @@ -31,7 +31,6 @@ import os.path import shutil -from six.moves import xrange # pylint: disable=redefined-builtin # TODO(erez): This code currently does not support eager mode and can't # be run in tensorflow 2.0. 
Some of the issues are that it uses @@ -83,7 +82,7 @@ def init_temperature_list(): global TEMPERATURE_LIST TEMPERATURE_LIST = [ 270 + i * 50.0 - for i in xrange( + for i in range( 0, int(math.sqrt(FLAGS.num_session_groups / len(HEAT_COEFFICIENTS))) ) ] @@ -234,7 +233,7 @@ def run(logdir, session_id, hparams, group_name): sess = tf.Session() sess.run(tf.global_variables_initializer()) - for step in xrange(FLAGS.num_steps): + for step in range(FLAGS.num_steps): # By asking TensorFlow to compute the update step, we force it to # change the value of the temperature variable. We don't actually # care about this value, so we discard it; instead, we grab the @@ -273,7 +272,7 @@ def run_all(logdir, verbose=False): } hparam_str = str(hparams) group_name = fingerprint(hparam_str) - for repeat_idx in xrange(2): + for repeat_idx in range(2): session_id = str(session_num) if verbose: print( diff --git a/tensorboard/plugins/image/images_demo.py b/tensorboard/plugins/image/images_demo.py index faeaca4e71..2d8b901b01 100644 --- a/tensorboard/plugins/image/images_demo.py +++ b/tensorboard/plugins/image/images_demo.py @@ -22,7 +22,6 @@ import textwrap import urllib.request -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow.compat.v1 as tf @@ -139,7 +138,7 @@ def run_box_to_gaussian(logdir, verbose=False): iterations = 4 images = [tf.cast(image, tf.float32) / 255.0] - for _ in xrange(iterations): + for _ in range(iterations): images.append(convolve(images[-1], pixel_filter)) with tf.name_scope("convert_to_uint8"): images = tf.stack( @@ -193,7 +192,7 @@ def run_box_to_gaussian(logdir, verbose=False): sess.run(image.initializer) writer = tf.summary.FileWriter(os.path.join(logdir, "box_to_gaussian")) writer.add_graph(sess.graph) - for step in xrange(8): + for step in range(8): if verbose: logger.info("--- box_to_gaussian: step: %s" % step) feed_dict = {blur_radius: step} @@ -281,7 +280,7 @@ def run_sobel(logdir, verbose=False): 
sess.run(image.initializer) writer = tf.summary.FileWriter(os.path.join(logdir, "sobel")) writer.add_graph(sess.graph) - for step in xrange(8): + for step in range(8): if verbose: logger.info("--- sobel: step: %s" % step) feed_dict = {kernel_radius: step} diff --git a/tensorboard/plugins/image/images_plugin_test.py b/tensorboard/plugins/image/images_plugin_test.py index 457288d44c..b272533d8d 100644 --- a/tensorboard/plugins/image/images_plugin_test.py +++ b/tensorboard/plugins/image/images_plugin_test.py @@ -23,7 +23,6 @@ import urllib.parse import numpy -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from werkzeug import test as werkzeug_test from werkzeug import wrappers @@ -69,7 +68,7 @@ def _create_data(self): foo_directory = os.path.join(self.log_dir, "foo") with test_util.FileWriterCache.get(foo_directory) as writer: writer.add_graph(sess.graph) - for step in xrange(2): + for step in range(2): writer.add_summary( sess.run( merged_summary_op, @@ -95,7 +94,7 @@ def _create_data(self): bar_directory = os.path.join(self.log_dir, "bar") with test_util.FileWriterCache.get(bar_directory) as writer: writer.add_graph(sess.graph) - for step in xrange(2): + for step in range(2): writer.add_summary( sess.run( merged_summary_op, diff --git a/tensorboard/plugins/pr_curve/README.md b/tensorboard/plugins/pr_curve/README.md index 0c50e50474..b05e8a2dfb 100644 --- a/tensorboard/plugins/pr_curve/README.md +++ b/tensorboard/plugins/pr_curve/README.md @@ -38,7 +38,7 @@ merged_summary = tf.summary.merge_all() with tf.Session() as sess: writer = tf.summary.FileWriter('/tmp/logdir', sess.graph) sess.run(tf.local_variables_initializer()) - for step in xrange(43): + for step in range(43): sess.run([update_op]) if step % 6 == 0: writer.add_summary(sess.run(merged_summary), global_step=step) @@ -147,7 +147,7 @@ merged_summary = tf.summary.merge_all() with tf.Session() as sess: writer = tf.summary.FileWriter('/tmp/logdir', sess.graph) 
sess.run(tf.local_variables_initializer()) - for step in xrange(43): + for step in range(43): sess.run([update_op]) if step % 6 == 0: writer.add_summary(sess.run(merged_summary), global_step=step) @@ -200,7 +200,7 @@ merged_summary = tf.summary.merge_all() with tf.Session() as sess: writer = tf.summary.FileWriter('/tmp/logdir', sess.graph) - for step in xrange(43): + for step in range(43): writer.add_summary(sess.run(merged_summary), global_step=step) ``` diff --git a/tensorboard/plugins/pr_curve/pr_curve_demo.py b/tensorboard/plugins/pr_curve/pr_curve_demo.py index efeee237e8..e8f1c55659 100644 --- a/tensorboard/plugins/pr_curve/pr_curve_demo.py +++ b/tensorboard/plugins/pr_curve/pr_curve_demo.py @@ -31,7 +31,6 @@ from absl import app from absl import flags -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from tensorboard.plugins.pr_curve import summary @@ -224,7 +223,7 @@ def start_runs( sess = tf.compat.v1.Session() writer = tf.compat.v1.summary.FileWriter(events_directory, sess.graph) - for step in xrange(steps): + for step in range(steps): feed_dict = { iteration: step, } diff --git a/tensorboard/plugins/scalar/scalars_demo.py b/tensorboard/plugins/scalar/scalars_demo.py index a2410497dc..3bc786c63a 100644 --- a/tensorboard/plugins/scalar/scalars_demo.py +++ b/tensorboard/plugins/scalar/scalars_demo.py @@ -18,7 +18,6 @@ import os.path from absl import app -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow.compat.v1 as tf from tensorboard.plugins.scalar import summary @@ -116,7 +115,7 @@ def run( writer = tf.summary.FileWriter(os.path.join(logdir, run_name)) writer.add_graph(sess.graph) sess.run(tf.global_variables_initializer()) - for step in xrange(STEPS): + for step in range(STEPS): # By asking TensorFlow to compute the update step, we force it to # change the value of the temperature variable. 
We don't actually # care about this value, so we discard it; instead, we grab the diff --git a/tensorboard/plugins/scalar/scalars_plugin_test.py b/tensorboard/plugins/scalar/scalars_plugin_test.py index 91eb964b0f..352f182ad3 100644 --- a/tensorboard/plugins/scalar/scalars_plugin_test.py +++ b/tensorboard/plugins/scalar/scalars_plugin_test.py @@ -21,7 +21,6 @@ import os.path from six import StringIO -from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from werkzeug import test as werkzeug_test from werkzeug import wrappers @@ -88,7 +87,7 @@ def load_server(self, run_names): def generate_run(self, logdir, run_name): subdir = os.path.join(logdir, run_name) with test_util.FileWriterCache.get(subdir) as writer: - for step in xrange(self._STEPS): + for step in range(self._STEPS): data = [1 + step, 2 + step, 3 + step] if run_name == self._RUN_WITH_LEGACY_SCALARS: summ = tf.compat.v1.summary.scalar( diff --git a/tensorboard/program.py b/tensorboard/program.py index 4670f40215..af423dc6bb 100644 --- a/tensorboard/program.py +++ b/tensorboard/program.py @@ -46,7 +46,6 @@ from absl import flags as absl_flags from absl.flags import argparse_flags import absl.logging -from six.moves import xrange # pylint: disable=redefined-builtin from werkzeug import serving from tensorboard import manager @@ -583,7 +582,7 @@ def init(wsgi_app, flags): max_attempts = 100 if should_scan else 1 base_port = min(base_port + max_attempts, 0x10000) - max_attempts - for port in xrange(base_port, base_port + max_attempts): + for port in range(base_port, base_port + max_attempts): subflags = argparse.Namespace(**vars(flags)) subflags.port = port try: diff --git a/tensorboard/scripts/generate_testdata.py b/tensorboard/scripts/generate_testdata.py index ffe5e632f0..9fd9026284 100644 --- a/tensorboard/scripts/generate_testdata.py +++ b/tensorboard/scripts/generate_testdata.py @@ -25,7 +25,6 @@ from absl import app from absl import flags import numpy as np -from 
six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf @@ -69,7 +68,7 @@ def _MakeHistogram(values): counts[idx] += 1 limit_counts = [ - (limits[i], counts[i]) for i in xrange(len(limits)) if counts[i] + (limits[i], counts[i]) for i in range(len(limits)) if counts[i] ] bucket_limit = [lc[0] for lc in limit_counts] bucket = [lc[1] for lc in limit_counts] @@ -89,7 +88,7 @@ def WriteScalarSeries(writer, tag, f, n=5): """Write a series of scalar events to writer, using f to create values.""" step = 0 wall_time = _start_time - for i in xrange(n): + for i in range(n): v = f(i) value = tf.Summary.Value(tag=tag, simple_value=v) summary = tf.Summary(value=[value]) @@ -104,7 +103,7 @@ def WriteHistogramSeries(writer, tag, mu_sigma_tuples, n=20): step = 0 wall_time = _start_time for [mean, stddev] in mu_sigma_tuples: - data = [random.normalvariate(mean, stddev) for _ in xrange(n)] + data = [random.normalvariate(mean, stddev) for _ in range(n)] histo = _MakeHistogram(data) summary = tf.Summary(value=[tf.Summary.Value(tag=tag, histo=histo)]) event = tf.Event(wall_time=wall_time, step=step, summary=summary) @@ -119,7 +118,7 @@ def WriteImageSeries(writer, tag, n_images=1): session = tf.compat.v1.Session() p = tf.compat.v1.placeholder("uint8", (1, 4, 4, 3)) s = tf.compat.v1.summary.image(tag, p) - for _ in xrange(n_images): + for _ in range(n_images): im = np.random.random_integers(0, 255, (1, 4, 4, 3)) summ = session.run(s, feed_dict={p: im}) writer.add_summary(summ, step) @@ -144,7 +143,7 @@ def WriteAudioSeries(writer, tag, n_audio=1): ) s = tf.compat.v1.summary.audio(tag, p, sample_rate) - for _ in xrange(n_audio): + for _ in range(n_audio): # Generate a different frequency for each channel to show stereo works. 
frequencies = np.random.random_integers( min_frequency_hz, From ef5d61cc76ac4fd230b6911079f15fde9c7c4d66 Mon Sep 17 00:00:00 2001 From: William Chargin Date: Wed, 23 Dec 2020 16:01:13 -0800 Subject: [PATCH 6/9] py3: replace `six` text/bytes types MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: Generated with: ``` git sed 's/_\?six\.binary_type/bytes/g' && git sed 's/_\?six\.text_type/str/g' && git sed 's/_\?six\.string_types/str/g' && git sed 's/_\?six\.b(\("[^"]*"\))/b\1/g' && flake8 tensorboard/ --select=F401 | cut -d : -f 1 | xargs git sed '/import six/d' && black tensorboard ``` where [`git-sed`] does what it says on the tin. The `b("foo")` regex suffices because all of our calls to `six.b` are with string literals short enough to fit on one line. The `_\?`s are because some modules import `six` as `_six`. The `string_types` to `str` replacement is sound even though `string_types` was actually defined as `(str,)` because they’re equivalent in all actual call sites (usually `isinstance`s). [`git-sed`]: https://gist.github.com/wchargin/ea868384294e26103b90ac42e0de82d9 Test Plan: It suffices that all tests pass. 
wchargin-branch: py3-six-text-bytes --- .../backend/event_processing/event_accumulator_test.py | 5 ++--- .../event_processing/plugin_event_accumulator_test.py | 5 ++--- tensorboard/backend/http_util_test.py | 2 +- tensorboard/compat/tensorflow_stub/compat/__init__.py | 6 +++--- tensorboard/compat/tensorflow_stub/io/gfile.py | 5 ++--- tensorboard/data/provider.py | 5 ++--- tensorboard/manager.py | 5 ++--- tensorboard/plugin_util.py | 3 +-- tensorboard/plugin_util_test.py | 5 ++--- .../plugins/debugger_v2/debugger_v2_plugin_test.py | 5 ++--- tensorboard/plugins/hparams/list_session_groups.py | 2 +- tensorboard/plugins/hparams/metrics.py | 3 +-- tensorboard/plugins/hparams/summary.py | 5 ++--- tensorboard/plugins/hparams/summary_v2.py | 5 ++--- tensorboard/plugins/text/summary_test.py | 9 ++++----- tensorboard/util/grpc_util_test.py | 3 +-- 16 files changed, 30 insertions(+), 43 deletions(-) diff --git a/tensorboard/backend/event_processing/event_accumulator_test.py b/tensorboard/backend/event_processing/event_accumulator_test.py index 0acfc57313..cbbfd52028 100644 --- a/tensorboard/backend/event_processing/event_accumulator_test.py +++ b/tensorboard/backend/event_processing/event_accumulator_test.py @@ -17,7 +17,6 @@ import os import numpy as np -import six import tensorflow as tf from tensorboard.backend.event_processing import event_accumulator as ea @@ -807,7 +806,7 @@ def testTFSummaryTensor(self): "vector", tf.constant([1.0, 2.0, 3.0]) ) tf.compat.v1.summary.tensor_summary( - "string", tf.constant(six.b("foobar")) + "string", tf.constant(b"foobar") ) merged = tf.compat.v1.summary.merge_all() summ = sess.run(merged) @@ -832,7 +831,7 @@ def testTFSummaryTensor(self): self.assertTrue(np.array_equal(scalar, 1.0)) self.assertTrue(np.array_equal(vector, [1.0, 2.0, 3.0])) - self.assertTrue(np.array_equal(string, six.b("foobar"))) + self.assertTrue(np.array_equal(string, b"foobar")) class RealisticEventAccumulatorTest(EventAccumulatorTest): diff --git 
a/tensorboard/backend/event_processing/plugin_event_accumulator_test.py b/tensorboard/backend/event_processing/plugin_event_accumulator_test.py index a3c40bcc5a..eb2ffb6b88 100644 --- a/tensorboard/backend/event_processing/plugin_event_accumulator_test.py +++ b/tensorboard/backend/event_processing/plugin_event_accumulator_test.py @@ -17,7 +17,6 @@ import os import numpy as np -import six import tensorflow as tf from tensorboard import data_compat @@ -530,7 +529,7 @@ def testTFSummaryTensor(self): tensor_summary = tf.compat.v1.summary.tensor_summary tensor_summary("scalar", tf.constant(1.0)) tensor_summary("vector", tf.constant([1.0, 2.0, 3.0])) - tensor_summary("string", tf.constant(six.b("foobar"))) + tensor_summary("string", tf.constant(b"foobar")) merged = tf.compat.v1.summary.merge_all() summ = sess.run(merged) writer.add_summary(summ, 0) @@ -554,7 +553,7 @@ def testTFSummaryTensor(self): self.assertTrue(np.array_equal(scalar, 1.0)) self.assertTrue(np.array_equal(vector, [1.0, 2.0, 3.0])) - self.assertTrue(np.array_equal(string, six.b("foobar"))) + self.assertTrue(np.array_equal(string, b"foobar")) self.assertItemsEqual(accumulator.ActivePlugins(), []) diff --git a/tensorboard/backend/http_util_test.py b/tensorboard/backend/http_util_test.py index fcb63ef849..be70d04a44 100644 --- a/tensorboard/backend/http_util_test.py +++ b/tensorboard/backend/http_util_test.py @@ -33,7 +33,7 @@ def testHelloWorld(self): q = wrappers.Request(wtest.EnvironBuilder().get_environ()) r = http_util.Respond(q, "hello world", "text/html") self.assertEqual(r.status_code, 200) - self.assertEqual(r.response, [six.b("hello world")]) + self.assertEqual(r.response, [b"hello world"]) self.assertEqual(r.headers.get("Content-Length"), "18") def testHeadRequest_doesNotWrite(self): diff --git a/tensorboard/compat/tensorflow_stub/compat/__init__.py b/tensorboard/compat/tensorflow_stub/compat/__init__.py index 6e06edf730..ff9451b02b 100644 --- 
a/tensorboard/compat/tensorflow_stub/compat/__init__.py +++ b/tensorboard/compat/tensorflow_stub/compat/__init__.py @@ -49,7 +49,7 @@ def as_bytes(bytes_or_text, encoding="utf-8"): Raises: TypeError: If `bytes_or_text` is not a binary or unicode string. """ - if isinstance(bytes_or_text, _six.text_type): + if isinstance(bytes_or_text, str): return bytes_or_text.encode(encoding) elif isinstance(bytes_or_text, bytes): return bytes_or_text @@ -72,7 +72,7 @@ def as_text(bytes_or_text, encoding="utf-8"): Raises: TypeError: If `bytes_or_text` is not a binary or unicode string. """ - if isinstance(bytes_or_text, _six.text_type): + if isinstance(bytes_or_text, str): return bytes_or_text elif isinstance(bytes_or_text, bytes): return bytes_or_text.decode(encoding) @@ -135,6 +135,6 @@ def path_to_str(path): # tf_export('compat.complex_types').export_constant(__name__, 'complex_types') # Either bytes or text. -bytes_or_text_types = (bytes, _six.text_type) +bytes_or_text_types = (bytes, str) # tf_export('compat.bytes_or_text_types').export_constant(__name__, # 'bytes_or_text_types') diff --git a/tensorboard/compat/tensorflow_stub/io/gfile.py b/tensorboard/compat/tensorflow_stub/io/gfile.py index 9ee684e9a9..60ce4bccb5 100644 --- a/tensorboard/compat/tensorflow_stub/io/gfile.py +++ b/tensorboard/compat/tensorflow_stub/io/gfile.py @@ -24,7 +24,6 @@ import glob as py_glob import io import os -import six import sys import tempfile @@ -158,7 +157,7 @@ def _write(self, filename, file_content, mode): def glob(self, filename): """Returns a list of files that match the given pattern(s).""" - if isinstance(filename, six.string_types): + if isinstance(filename, str): return [ # Convert the filenames to string from bytes. 
compat.as_str_any(matching_filename) @@ -319,7 +318,7 @@ def write(self, filename, file_content, binary_mode=False): bucket, path = self.bucket_and_path(filename) # Always convert to bytes for writing if binary_mode: - if not isinstance(file_content, six.binary_type): + if not isinstance(file_content, bytes): raise TypeError("File content type must be bytes") else: file_content = compat.as_bytes(file_content) diff --git a/tensorboard/data/provider.py b/tensorboard/data/provider.py index 5e6dc3ca98..086f468619 100644 --- a/tensorboard/data/provider.py +++ b/tensorboard/data/provider.py @@ -17,7 +17,6 @@ import abc -import six import numpy as np @@ -964,7 +963,7 @@ def __init__(self, runs=None, tags=None): def _parse_optional_string_set(self, name, value): if value is None: return None - if isinstance(value, six.string_types): + if isinstance(value, str): # Prevent confusion: strings _are_ iterable, but as # sequences of characters, so this likely signals an error. raise TypeError( @@ -973,7 +972,7 @@ def _parse_optional_string_set(self, name, value): ) value = frozenset(value) for item in value: - if not isinstance(item, six.string_types): + if not isinstance(item, str): raise TypeError( "%s: expected `None` or collection of strings; " "got item of type %r: %r" % (name, type(item), item) diff --git a/tensorboard/manager.py b/tensorboard/manager.py index 61b1c0fbf7..ca4c4ba9cb 100644 --- a/tensorboard/manager.py +++ b/tensorboard/manager.py @@ -25,7 +25,6 @@ import tempfile import time -import six from tensorboard import version from tensorboard.util import tb_logging @@ -52,9 +51,9 @@ deserialize=lambda n: n, ) _type_str = _FieldType( - serialized_type=six.text_type, # `json.loads` always gives Unicode + serialized_type=str, # `json.loads` always gives Unicode runtime_type=str, - serialize=six.text_type, + serialize=str, deserialize=str, ) diff --git a/tensorboard/plugin_util.py b/tensorboard/plugin_util.py index ba02182bc7..1e6d7f7929 100644 --- 
a/tensorboard/plugin_util.py +++ b/tensorboard/plugin_util.py @@ -22,7 +22,6 @@ # pylint: disable=g-bad-import-order # Google-only: import markdown_freewisdom import markdown -import six from tensorboard import context as _context from tensorboard.backend import experiment_id as _experiment_id @@ -126,7 +125,7 @@ def markdowns_to_safe_html(markdown_strings, combine): for source in markdown_strings: # Convert to utf-8 whenever we have a binary input. - if isinstance(source, six.binary_type): + if isinstance(source, bytes): source_decoded = source.decode("utf-8") # Remove null bytes and warn if there were any, since it probably means # we were given a bad encoding. diff --git a/tensorboard/plugin_util_test.py b/tensorboard/plugin_util_test.py index 5c8dcae8d3..c22b03e4ea 100644 --- a/tensorboard/plugin_util_test.py +++ b/tensorboard/plugin_util_test.py @@ -15,7 +15,6 @@ import textwrap -import six from tensorboard import context from tensorboard import plugin_util @@ -109,14 +108,14 @@ def test_javascript_hrefs_sanitized(self): def test_byte_strings_interpreted_as_utf8(self): s = "> Look\u2014some UTF-8!".encode("utf-8") - assert isinstance(s, six.binary_type), (type(s), six.binary_type) + assert isinstance(s, bytes), (type(s), bytes) self._test( s, "
\n

Look\u2014some UTF-8!

\n
" ) def test_unicode_strings_passed_through(self): s = "> Look\u2014some UTF-8!" - assert not isinstance(s, six.binary_type), (type(s), six.binary_type) + assert not isinstance(s, bytes), (type(s), bytes) self._test( s, "
\n

Look\u2014some UTF-8!

\n
" ) diff --git a/tensorboard/plugins/debugger_v2/debugger_v2_plugin_test.py b/tensorboard/plugins/debugger_v2/debugger_v2_plugin_test.py index 7d220f8f8a..022118ebfd 100644 --- a/tensorboard/plugins/debugger_v2/debugger_v2_plugin_test.py +++ b/tensorboard/plugins/debugger_v2/debugger_v2_plugin_test.py @@ -18,7 +18,6 @@ import collections import json import os -import six import socket import threading @@ -1597,11 +1596,11 @@ def testServeStackFrames(self): self.assertIsInstance(item, list) self.assertLen(item, 4) # [host_name, file_path, lineno, function]. self.assertEqual(item[0], _HOST_NAME) - self.assertIsInstance(item[1], six.string_types) + self.assertIsInstance(item[1], str) self.assertTrue(item[1]) self.assertIsInstance(item[2], int) self.assertGreaterEqual(item[2], 1) - self.assertIsInstance(item[3], six.string_types) + self.assertIsInstance(item[3], str) self.assertTrue(item[3]) # Assert that the current file and current function should be in the # stack frames. diff --git a/tensorboard/plugins/hparams/list_session_groups.py b/tensorboard/plugins/hparams/list_session_groups.py index cb3a837d79..97e0808825 100644 --- a/tensorboard/plugins/hparams/list_session_groups.py +++ b/tensorboard/plugins/hparams/list_session_groups.py @@ -492,7 +492,7 @@ def _create_regexp_filter(regex): compiled_regex = re.compile(regex) def filter_fn(value): - if not isinstance(value, six.string_types): + if not isinstance(value, str): raise error.HParamsError( "Cannot use a regexp filter for a value of type %s. Value: %s" % (type(value), value) diff --git a/tensorboard/plugins/hparams/metrics.py b/tensorboard/plugins/hparams/metrics.py index fce55eaf16..4155c6cd85 100644 --- a/tensorboard/plugins/hparams/metrics.py +++ b/tensorboard/plugins/hparams/metrics.py @@ -17,7 +17,6 @@ import os -import six from tensorboard.plugins.hparams import api_pb2 @@ -31,7 +30,7 @@ def run_tag_from_session_and_metric(session_name, metric_name): metric_name: MetricName protobuffer. 
Returns: (run, tag) tuple. """ - assert isinstance(session_name, six.string_types) + assert isinstance(session_name, str) assert isinstance(metric_name, api_pb2.MetricName) # os.path.join() will append a final slash if the group is empty; it seems # like multiplexer.Tensors won't recognize paths that end with a '/' so diff --git a/tensorboard/plugins/hparams/summary.py b/tensorboard/plugins/hparams/summary.py index 30329bda36..be71e78fde 100644 --- a/tensorboard/plugins/hparams/summary.py +++ b/tensorboard/plugins/hparams/summary.py @@ -34,7 +34,6 @@ import time -import six import tensorflow as tf @@ -101,7 +100,7 @@ def session_start_pb( +-----------------+---------------------------------+ |DATA_TYPE_BOOL | bool | |DATA_TYPE_FLOAT64| int, float | - |DATA_TYPE_STRING | six.string_types, tuple, list | + |DATA_TYPE_STRING | str, tuple, list | +-----------------+---------------------------------+ Tuple and list instances will be converted to their string @@ -129,7 +128,7 @@ def session_start_pb( for (hp_name, hp_val) in hparams.items(): if isinstance(hp_val, (float, int)): session_start_info.hparams[hp_name].number_value = hp_val - elif isinstance(hp_val, six.string_types): + elif isinstance(hp_val, str): session_start_info.hparams[hp_name].string_value = hp_val elif isinstance(hp_val, bool): session_start_info.hparams[hp_name].bool_value = hp_val diff --git a/tensorboard/plugins/hparams/summary_v2.py b/tensorboard/plugins/hparams/summary_v2.py index 64308e6072..b48e6f1edc 100644 --- a/tensorboard/plugins/hparams/summary_v2.py +++ b/tensorboard/plugins/hparams/summary_v2.py @@ -25,7 +25,6 @@ import time import numpy as np -import six from tensorboard.compat import tf2 as tf from tensorboard.compat.proto import summary_pb2 @@ -95,7 +94,7 @@ def hparams_pb(hparams, trial_id=None, start_time_secs=None): session_start_info.hparams[hp_name].bool_value = hp_value elif isinstance(hp_value, (float, int)): session_start_info.hparams[hp_name].number_value = hp_value - elif 
isinstance(hp_value, six.string_types): + elif isinstance(hp_value, str): session_start_info.hparams[hp_name].string_value = hp_value else: raise TypeError( @@ -227,7 +226,7 @@ def _normalize_numpy_value(value): def _derive_session_group_name(trial_id, hparams): if trial_id is not None: - if not isinstance(trial_id, six.string_types): + if not isinstance(trial_id, str): raise TypeError( "`trial_id` should be a `str`, but got: %r" % (trial_id,) ) diff --git a/tensorboard/plugins/text/summary_test.py b/tensorboard/plugins/text/summary_test.py index 1226132913..e55450972e 100644 --- a/tensorboard/plugins/text/summary_test.py +++ b/tensorboard/plugins/text/summary_test.py @@ -21,7 +21,6 @@ import numpy as np -import six import tensorflow as tf from tensorboard.compat import tf2 @@ -74,13 +73,13 @@ def test_explicit_description(self): def test_bytes_value(self): pb = self.text("mi", b"A name\xe2\x80\xa6I call myself") value = tensor_util.make_ndarray(pb.value[0].tensor).item() - self.assertIsInstance(value, six.binary_type) + self.assertIsInstance(value, bytes) self.assertEqual(b"A name\xe2\x80\xa6I call myself", value) def test_unicode_value(self): pb = self.text("mi", "A name\u2026I call myself") value = tensor_util.make_ndarray(pb.value[0].tensor).item() - self.assertIsInstance(value, six.binary_type) + self.assertIsInstance(value, bytes) self.assertEqual(b"A name\xe2\x80\xa6I call myself", value) def test_np_array_bytes_value(self): @@ -98,7 +97,7 @@ def test_np_array_bytes_value(self): # Check that all entries are byte strings. for vectors in values: for value in vectors: - self.assertIsInstance(value, six.binary_type) + self.assertIsInstance(value, bytes) def test_np_array_unicode_value(self): pb = self.text( @@ -113,7 +112,7 @@ def test_np_array_unicode_value(self): # Check that all entries are byte strings. 
for vectors in values: for value in vectors: - self.assertIsInstance(value, six.binary_type) + self.assertIsInstance(value, bytes) def test_non_string_value(self): with self.assertRaisesRegex(TypeError, r"must be of type.*string"): diff --git a/tensorboard/util/grpc_util_test.py b/tensorboard/util/grpc_util_test.py index ad553d73c9..52b117fec8 100644 --- a/tensorboard/util/grpc_util_test.py +++ b/tensorboard/util/grpc_util_test.py @@ -21,7 +21,6 @@ from concurrent import futures import grpc -import six from tensorboard.util import grpc_util from tensorboard.util import grpc_util_test_pb2 @@ -171,7 +170,7 @@ def test_structure(self): self.assertLen(kv, 2) (k, v) = kv self.assertIsInstance(k, str) - self.assertIsInstance(v, six.string_types) + self.assertIsInstance(v, str) def test_roundtrip(self): result = grpc_util.extract_version(grpc_util.version_metadata()) From f966cd8e2cea7e24877351b266dc59c4f474a41d Mon Sep 17 00:00:00 2001 From: William Chargin Date: Wed, 23 Dec 2020 16:55:07 -0800 Subject: [PATCH 7/9] py3: replace `six.StringIO`, `six.BytesIO` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: Generated with `git sed 's/six\.\(StringIO\|BytesIO\)/io.\1/g'` and then manually fixing imports in affected files, where [`git-sed`] does what it says on the tin. Manual updates needed to `scalars_plugin.py` and its tests, since they didn’t follow the “only import modules” style rule and thus didn’t match the `sed` expression. [`git-sed`]: https://gist.github.com/wchargin/ea868384294e26103b90ac42e0de82d9 Test Plan: It suffices that all tests pass. 
wchargin-branch: py3-six-stringio-bytesio --- tensorboard/backend/http_util.py | 7 +++---- tensorboard/backend/http_util_test.py | 6 +++--- tensorboard/plugins/core/core_plugin.py | 4 ++-- tensorboard/plugins/core/core_plugin_test.py | 6 +++--- tensorboard/plugins/hparams/download_data.py | 5 ++--- tensorboard/plugins/scalar/scalars_plugin.py | 4 ++-- tensorboard/plugins/scalar/scalars_plugin_test.py | 4 ++-- tensorboard/program_test.py | 5 ++--- tensorboard/summary/writer/record_writer_test.py | 4 ++-- 9 files changed, 21 insertions(+), 24 deletions(-) diff --git a/tensorboard/backend/http_util.py b/tensorboard/backend/http_util.py index 263fe3b64e..e0dfc47eba 100644 --- a/tensorboard/backend/http_util.py +++ b/tensorboard/backend/http_util.py @@ -16,14 +16,13 @@ import gzip +import io import json import re import struct import time import wsgiref.handlers -import six - import werkzeug from tensorboard.backend import json_util @@ -163,7 +162,7 @@ def Respond( ) # Automatically gzip uncompressed text data if accepted. if textual and not content_encoding and gzip_accepted: - out = six.BytesIO() + out = io.BytesIO() # Set mtime to zero to make payload for a given input deterministic. with gzip.GzipFile( fileobj=out, mode="wb", compresslevel=3, mtime=0 @@ -176,7 +175,7 @@ def Respond( direct_passthrough = False # Automatically streamwise-gunzip precompressed data if not accepted. if content_encoding == "gzip" and not gzip_accepted: - gzip_file = gzip.GzipFile(fileobj=six.BytesIO(content), mode="rb") + gzip_file = gzip.GzipFile(fileobj=io.BytesIO(content), mode="rb") # Last 4 bytes of gzip formatted data (little-endian) store the original # content length mod 2^32; we just assume it's the content length. That # means we can't streamwise-gunzip >4 GB precompressed file; this is ok. 
diff --git a/tensorboard/backend/http_util_test.py b/tensorboard/backend/http_util_test.py index be70d04a44..6c6eb87a57 100644 --- a/tensorboard/backend/http_util_test.py +++ b/tensorboard/backend/http_util_test.py @@ -17,10 +17,10 @@ import gzip +import io import struct from unittest import mock -import six from werkzeug import test as wtest from werkzeug import wrappers @@ -325,14 +325,14 @@ def testCsp_globalDomainWhiteList(self): def _gzip(bs): - out = six.BytesIO() + out = io.BytesIO() with gzip.GzipFile(fileobj=out, mode="wb") as f: f.write(bs) return out.getvalue() def _gunzip(bs): - with gzip.GzipFile(fileobj=six.BytesIO(bs), mode="rb") as f: + with gzip.GzipFile(fileobj=io.BytesIO(bs), mode="rb") as f: return f.read() diff --git a/tensorboard/plugins/core/core_plugin.py b/tensorboard/plugins/core/core_plugin.py index a7670aa0df..22084af1c9 100644 --- a/tensorboard/plugins/core/core_plugin.py +++ b/tensorboard/plugins/core/core_plugin.py @@ -17,10 +17,10 @@ import functools import gzip +import io import mimetypes import zipfile -import six from werkzeug import utils from werkzeug import wrappers @@ -568,7 +568,7 @@ def load(self, context): def _gzip(bytestring): - out = six.BytesIO() + out = io.BytesIO() # Set mtime to zero for deterministic results across TensorBoard launches. 
with gzip.GzipFile(fileobj=out, mode="wb", compresslevel=3, mtime=0) as f: f.write(bytestring) diff --git a/tensorboard/plugins/core/core_plugin_test.py b/tensorboard/plugins/core/core_plugin_test.py index 39ea19d1e2..5558a503e4 100644 --- a/tensorboard/plugins/core/core_plugin_test.py +++ b/tensorboard/plugins/core/core_plugin_test.py @@ -17,9 +17,9 @@ import collections.abc import contextlib +import io import json import os -import six from unittest import mock import zipfile @@ -371,12 +371,12 @@ def FirstEventTimestamp_stub(run_name): def get_test_assets_zip_provider(): - memfile = six.BytesIO() + memfile = io.BytesIO() with zipfile.ZipFile( memfile, mode="w", compression=zipfile.ZIP_DEFLATED ) as zf: zf.writestr("index.html", FAKE_INDEX_HTML) - return lambda: contextlib.closing(six.BytesIO(memfile.getvalue())) + return lambda: contextlib.closing(io.BytesIO(memfile.getvalue())) if __name__ == "__main__": diff --git a/tensorboard/plugins/hparams/download_data.py b/tensorboard/plugins/hparams/download_data.py index 2ad1c361c2..ae15485095 100644 --- a/tensorboard/plugins/hparams/download_data.py +++ b/tensorboard/plugins/hparams/download_data.py @@ -16,10 +16,9 @@ import csv +import io import math -import six - from tensorboard.plugins.hparams import error @@ -147,7 +146,7 @@ def latex_format(value): bottom_part = "\\hline\n\\end{tabular}\n\\end{table}\n" body = top_part + header_part + middle_part + bottom_part elif response_format == OutputFormat.CSV: - string_io = six.StringIO() + string_io = io.StringIO() writer = csv.writer(string_io) writer.writerow(header) writer.writerows(rows) diff --git a/tensorboard/plugins/scalar/scalars_plugin.py b/tensorboard/plugins/scalar/scalars_plugin.py index 969a3f28d9..cfdad1939b 100644 --- a/tensorboard/plugins/scalar/scalars_plugin.py +++ b/tensorboard/plugins/scalar/scalars_plugin.py @@ -20,8 +20,8 @@ import csv +import io -from six import StringIO import werkzeug.exceptions from werkzeug import wrappers @@ -107,7 +107,7 @@ 
def scalars_impl(self, ctx, tag, run, experiment, output_format): ) values = [(x.wall_time, x.step, x.value) for x in scalars] if output_format == OutputFormat.CSV: - string_io = StringIO() + string_io = io.StringIO() writer = csv.writer(string_io) writer.writerow(["Wall time", "Step", "Value"]) writer.writerows(values) diff --git a/tensorboard/plugins/scalar/scalars_plugin_test.py b/tensorboard/plugins/scalar/scalars_plugin_test.py index 352f182ad3..43f1b4909d 100644 --- a/tensorboard/plugins/scalar/scalars_plugin_test.py +++ b/tensorboard/plugins/scalar/scalars_plugin_test.py @@ -17,10 +17,10 @@ import csv +import io import json import os.path -from six import StringIO import tensorflow as tf from werkzeug import test as werkzeug_test from werkzeug import wrappers @@ -366,7 +366,7 @@ def test_download_url_csv(self): "text/csv; charset=utf-8", response.headers["Content-Type"] ) payload = response.get_data() - s = StringIO(payload.decode("utf-8")) + s = io.StringIO(payload.decode("utf-8")) reader = csv.reader(s) self.assertEqual(["Wall time", "Step", "Value"], next(reader)) self.assertEqual(len(list(reader)), self._STEPS) diff --git a/tensorboard/program_test.py b/tensorboard/program_test.py index 230f500d1b..c465e23c98 100644 --- a/tensorboard/program_test.py +++ b/tensorboard/program_test.py @@ -16,11 +16,10 @@ import argparse +import io import sys from unittest import mock -import six - from tensorboard import program from tensorboard import test as tb_test from tensorboard.plugins import base_plugin @@ -131,7 +130,7 @@ def testSpecifiedHost(self): class SubcommandTest(tb_test.TestCase): def setUp(self): super(SubcommandTest, self).setUp() - self.stderr = six.StringIO() + self.stderr = io.StringIO() patchers = [ mock.patch.object(program.TensorBoard, "_install_signal_handler"), mock.patch.object(program.TensorBoard, "_run_serve_subcommand"), diff --git a/tensorboard/summary/writer/record_writer_test.py b/tensorboard/summary/writer/record_writer_test.py index 
3cc0210140..d48ae1f5be 100644 --- a/tensorboard/summary/writer/record_writer_test.py +++ b/tensorboard/summary/writer/record_writer_test.py @@ -16,7 +16,7 @@ # """Tests for RecordWriter""" -import six +import io import os from tensorboard.summary.writer.record_writer import RecordWriter from tensorboard.compat.tensorflow_stub import errors @@ -82,7 +82,7 @@ def test_record_immediate_read(self): def test_expect_bytes_written_bytes_IO(self): byte_len = 64 - Bytes_io = six.BytesIO() + Bytes_io = io.BytesIO() w = RecordWriter(Bytes_io) bytes_to_write = b"x" * byte_len w.write(bytes_to_write) From 80ca6a154cbb746c0d6940e1b312eba6d216f67b Mon Sep 17 00:00:00 2001 From: William Chargin Date: Wed, 23 Dec 2020 17:05:54 -0800 Subject: [PATCH 8/9] py3: replace miscellaneous `six` usage MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: A few long-tail stragglers, manually updated. We still have the `six` dependency in our codebase because protobuf requires it to exist(…), but all of our code is now six-free. Test Plan: All tests pass, and `git grep '\(import\|from\) six'` fails. 
wchargin-branch: py3-six-misc --- tensorboard/backend/event_processing/data_provider.py | 4 +--- tensorboard/compat/tensorflow_stub/compat/__init__.py | 10 +--------- tensorboard/plugins/hparams/list_session_groups.py | 11 +++-------- 3 files changed, 5 insertions(+), 20 deletions(-) diff --git a/tensorboard/backend/event_processing/data_provider.py b/tensorboard/backend/event_processing/data_provider.py index 703ea83b6a..a391ef1a45 100644 --- a/tensorboard/backend/event_processing/data_provider.py +++ b/tensorboard/backend/event_processing/data_provider.py @@ -19,8 +19,6 @@ import json import random -import six - from tensorboard import errors from tensorboard.compat.proto import summary_pb2 from tensorboard.data import provider @@ -392,7 +390,7 @@ def _encode_blob_key(experiment_id, plugin_name, run, tag, step, index): ) bytesified = stringified.encode("ascii") encoded = base64.urlsafe_b64encode(bytesified) - return six.ensure_str(encoded).rstrip("=") + return encoded.decode("ascii").rstrip("=") # Any changes to this function need not be backward-compatible, even though diff --git a/tensorboard/compat/tensorflow_stub/compat/__init__.py b/tensorboard/compat/tensorflow_stub/compat/__init__.py index ff9451b02b..3609676778 100644 --- a/tensorboard/compat/tensorflow_stub/compat/__init__.py +++ b/tensorboard/compat/tensorflow_stub/compat/__init__.py @@ -30,7 +30,6 @@ import numbers as _numbers import numpy as _np -import six as _six from tensorboard.compat.tensorflow_stub.compat.v1 import * # noqa @@ -83,14 +82,7 @@ def as_text(bytes_or_text, encoding="utf-8"): # Convert an object to a `str` in both Python 2 and 3. 
-if _six.PY2: - as_str = as_bytes - # tf_export('compat.as_bytes', 'compat.as_str')(as_bytes) - # tf_export('compat.as_text')(as_text) -else: - as_str = as_text - # tf_export('compat.as_bytes')(as_bytes) - # tf_export('compat.as_text', 'compat.as_str')(as_text) +as_str = as_text # @tf_export('compat.as_str_any') diff --git a/tensorboard/plugins/hparams/list_session_groups.py b/tensorboard/plugins/hparams/list_session_groups.py index 97e0808825..e43f868df3 100644 --- a/tensorboard/plugins/hparams/list_session_groups.py +++ b/tensorboard/plugins/hparams/list_session_groups.py @@ -19,8 +19,6 @@ import operator import re -import six - from google.protobuf import struct_pb2 from tensorboard.data import provider @@ -508,15 +506,12 @@ def _create_interval_filter(interval): Args: interval: A tensorboard.hparams.Interval protobuf describing the interval. Returns: - A function taking a number (a float or an object of a type in - six.integer_types) that returns True if the number belongs to (the closed) - 'interval'. + A function taking a number (float or int) that returns True if the number + belongs to (the closed) 'interval'. """ def filter_fn(value): - if not isinstance(value, six.integer_types) and not isinstance( - value, float - ): + if not isinstance(value, (int, float)): raise error.HParamsError( "Cannot use an interval filter for a value of type: %s, Value: %s" % (type(value), value) From 4d66f559f1650d37ebd9c11bbaa8081c17211b2e Mon Sep 17 00:00:00 2001 From: William Chargin Date: Wed, 23 Dec 2020 17:09:47 -0800 Subject: [PATCH 9/9] py3: remove `@org_pythonhosted_six` deps Summary: As of the previous sequence of changes, none of our code uses `six`. We still keep it in the build workspace because protobuf requires that it exist there (see comment in `third_party/python.bzl`), but we can remove all dependency edges from our code to `@org_pythonhosted_six`. 
Generated with: ``` buildozer //tensorboard/...:all 'remove deps @org_pythonhosted_six' ``` Test Plan: It suffices that all tests pass both here and in a test sync. wchargin-branch: py3-no-six-deps --- tensorboard/BUILD | 22 ++-------------------- tensorboard/backend/BUILD | 3 --- tensorboard/backend/event_processing/BUILD | 8 -------- tensorboard/compat/proto/BUILD | 1 - tensorboard/compat/tensorflow_stub/BUILD | 1 - tensorboard/data/BUILD | 6 +----- tensorboard/data/experimental/BUILD | 5 +---- tensorboard/plugins/BUILD | 4 ---- tensorboard/plugins/audio/BUILD | 4 ---- tensorboard/plugins/core/BUILD | 2 -- tensorboard/plugins/custom_scalar/BUILD | 2 -- tensorboard/plugins/debugger_v2/BUILD | 1 - tensorboard/plugins/distribution/BUILD | 2 -- tensorboard/plugins/graph/BUILD | 2 -- tensorboard/plugins/histogram/BUILD | 5 ----- tensorboard/plugins/hparams/BUILD | 8 -------- tensorboard/plugins/image/BUILD | 4 ---- tensorboard/plugins/mesh/BUILD | 3 --- tensorboard/plugins/pr_curve/BUILD | 5 ----- tensorboard/plugins/projector/BUILD | 1 - tensorboard/plugins/scalar/BUILD | 5 ----- tensorboard/plugins/text/BUILD | 4 ---- tensorboard/plugins/text_v2/BUILD | 1 - tensorboard/scripts/BUILD | 1 - tensorboard/summary/writer/BUILD | 1 - tensorboard/uploader/BUILD | 3 --- tensorboard/util/BUILD | 2 -- 27 files changed, 4 insertions(+), 102 deletions(-) diff --git a/tensorboard/BUILD b/tensorboard/BUILD index 80a6426771..b37213e93b 100644 --- a/tensorboard/BUILD +++ b/tensorboard/BUILD @@ -75,19 +75,13 @@ py_test( srcs_version = "PY3", tags = ["support_notf"], visibility = ["//tensorboard:internal"], - deps = [ - ":lib", - "@org_pythonhosted_six", - ], + deps = [":lib"], ) py_library( name = "auth", srcs = ["auth.py"], srcs_version = "PY3", - deps = [ - "@org_pythonhosted_six", - ], ) py_test( @@ -148,7 +142,6 @@ py_library( deps = [ ":version", "//tensorboard/util:tb_logging", - "@org_pythonhosted_six", ], ) @@ -164,7 +157,6 @@ py_test( ":test", ":version", 
"//tensorboard/util:tb_logging", - "@org_pythonhosted_six", ], ) @@ -185,7 +177,6 @@ py_test( deps = [ ":manager", "//tensorboard:expect_tensorflow_installed", - "@org_pythonhosted_six", ], ) @@ -213,7 +204,6 @@ py_library( "//tensorboard/backend/event_processing:event_file_inspector", "//tensorboard/data:server_ingester", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -230,7 +220,6 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/plugins/core:core_plugin", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -242,7 +231,6 @@ py_library( deps = [ "//tensorboard:expect_absl_testing_absltest_installed", "//tensorboard/util:tb_logging", - "@org_pythonhosted_six", ], ) @@ -523,7 +511,6 @@ py_binary( "//tensorboard:expect_tensorflow_installed", "//tensorboard/util:encoder", "//tensorboard/util:tb_logging", - "@org_pythonhosted_six", ], ) @@ -537,7 +524,6 @@ py_library( "//tensorboard/backend:experiment_id", "@org_mozilla_bleach", "@org_pythonhosted_markdown", - "@org_pythonhosted_six", ], ) @@ -552,7 +538,6 @@ py_test( ":plugin_util", ":test", "//tensorboard/backend:experiment_id", - "@org_pythonhosted_six", ], ) @@ -574,8 +559,5 @@ py_test( srcs = ["lazy_test.py"], srcs_version = "PY3", tags = ["support_notf"], - deps = [ - ":lazy", - "@org_pythonhosted_six", - ], + deps = [":lazy"], ) diff --git a/tensorboard/backend/BUILD b/tensorboard/backend/BUILD index a9e5dae7ca..50953c486a 100644 --- a/tensorboard/backend/BUILD +++ b/tensorboard/backend/BUILD @@ -13,7 +13,6 @@ py_library( deps = [ ":json_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -27,7 +26,6 @@ py_test( ":http_util", "//tensorboard:test", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -68,7 +66,6 @@ py_library( "//tensorboard/plugins/core:core_plugin", "//tensorboard/util:tb_logging", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/backend/event_processing/BUILD b/tensorboard/backend/event_processing/BUILD index 
e7763d7cf6..6f2c58c456 100644 --- a/tensorboard/backend/event_processing/BUILD +++ b/tensorboard/backend/event_processing/BUILD @@ -13,7 +13,6 @@ py_library( deps = [ "//tensorboard/compat:tensorflow", "//tensorboard/util:tb_logging", - "@org_pythonhosted_six", ], ) @@ -25,7 +24,6 @@ py_test( deps = [ ":io_wrapper", "//tensorboard:expect_tensorflow_installed", - "@org_pythonhosted_six", ], ) @@ -68,7 +66,6 @@ py_library( "//tensorboard/data:provider", "//tensorboard/util:tb_logging", "//tensorboard/util:tensor_util", - "@org_pythonhosted_six", ], ) @@ -91,7 +88,6 @@ py_test( "//tensorboard/plugins/scalar:metadata", "//tensorboard/plugins/scalar:summary_v2", "//tensorboard/util:tensor_util", - "@org_pythonhosted_six", ], ) @@ -186,7 +182,6 @@ py_test( "//tensorboard:expect_tensorflow_installed", "//tensorboard/compat/proto:protos_all_py_pb2", "//tensorboard/summary/writer", - "@org_pythonhosted_six", ], ) @@ -202,7 +197,6 @@ py_test( "//tensorboard/compat:no_tensorflow", "//tensorboard/compat/proto:protos_all_py_pb2", "//tensorboard/summary/writer", - "@org_pythonhosted_six", ], ) @@ -274,7 +268,6 @@ py_test( "//tensorboard/util:tb_logging", "//tensorboard/util:tensor_util", "//tensorboard/util:test_util", - "@org_pythonhosted_six", ], ) @@ -291,7 +284,6 @@ py_library( ":event_accumulator", ":io_wrapper", "//tensorboard/util:tb_logging", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/compat/proto/BUILD b/tensorboard/compat/proto/BUILD index bcfe544dec..23599a083d 100644 --- a/tensorboard/compat/proto/BUILD +++ b/tensorboard/compat/proto/BUILD @@ -309,6 +309,5 @@ py_test( deps = [ ":protos_all_py_pb2", "//tensorboard:expect_tensorflow_installed", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/compat/tensorflow_stub/BUILD b/tensorboard/compat/tensorflow_stub/BUILD index 646297d846..aeb66c8b98 100644 --- a/tensorboard/compat/tensorflow_stub/BUILD +++ b/tensorboard/compat/tensorflow_stub/BUILD @@ -18,7 +18,6 @@ py_library( 
"//tensorboard:expect_absl_flags_installed", "//tensorboard:expect_numpy_installed", "//tensorboard/compat/proto:protos_all_py_pb2", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/data/BUILD b/tensorboard/data/BUILD index 5637760e84..340ddab4a4 100644 --- a/tensorboard/data/BUILD +++ b/tensorboard/data/BUILD @@ -19,10 +19,7 @@ py_library( name = "provider", srcs = ["provider.py"], srcs_version = "PY3", - deps = [ - "//tensorboard:expect_numpy_installed", - "@org_pythonhosted_six", - ], + deps = ["//tensorboard:expect_numpy_installed"], ) py_test( @@ -35,7 +32,6 @@ py_test( ":provider", "//tensorboard:expect_numpy_installed", "//tensorboard:test", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/data/experimental/BUILD b/tensorboard/data/experimental/BUILD index 2df4e8b423..a1ad64b5b9 100644 --- a/tensorboard/data/experimental/BUILD +++ b/tensorboard/data/experimental/BUILD @@ -61,8 +61,5 @@ py_binary( name = "test_binary", srcs = ["test_binary.py"], srcs_version = "PY3", - deps = [ - "//tensorboard/data/experimental:experiment_from_dev", - "@org_pythonhosted_six", - ], + deps = ["//tensorboard/data/experimental:experiment_from_dev"], ) diff --git a/tensorboard/plugins/BUILD b/tensorboard/plugins/BUILD index 1b5249253f..e840846539 100644 --- a/tensorboard/plugins/BUILD +++ b/tensorboard/plugins/BUILD @@ -10,9 +10,6 @@ py_library( srcs = ["base_plugin.py"], srcs_version = "PY3", visibility = ["//visibility:public"], - deps = [ - "@org_pythonhosted_six", - ], ) py_test( @@ -22,6 +19,5 @@ py_test( deps = [ ":base_plugin", "//tensorboard:test", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/audio/BUILD b/tensorboard/plugins/audio/BUILD index 273e019c8c..83dc202e54 100644 --- a/tensorboard/plugins/audio/BUILD +++ b/tensorboard/plugins/audio/BUILD @@ -19,7 +19,6 @@ py_library( "//tensorboard/data:provider", "//tensorboard/plugins:base_plugin", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -39,7 +38,6 @@ py_test( 
"//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -62,7 +60,6 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -114,7 +111,6 @@ py_binary( deps = [ ":summary", "//tensorboard:expect_tensorflow_installed", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/core/BUILD b/tensorboard/plugins/core/BUILD index 7e45cc17c0..4977541afe 100644 --- a/tensorboard/plugins/core/BUILD +++ b/tensorboard/plugins/core/BUILD @@ -15,7 +15,6 @@ py_library( "//tensorboard/plugins:base_plugin", "//tensorboard/util:tb_logging", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -34,6 +33,5 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/custom_scalar/BUILD b/tensorboard/plugins/custom_scalar/BUILD index 17691f4792..f83c9ee24f 100644 --- a/tensorboard/plugins/custom_scalar/BUILD +++ b/tensorboard/plugins/custom_scalar/BUILD @@ -23,7 +23,6 @@ py_library( "//tensorboard/plugins/scalar:scalars_plugin", "//tensorboard/util:tensor_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -127,6 +126,5 @@ py_binary( ":protos_all_py_pb2", "//tensorboard:expect_tensorflow_installed", "//tensorboard/summary:summary_v1", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/debugger_v2/BUILD b/tensorboard/plugins/debugger_v2/BUILD index 5fd9bee61d..55b5247a88 100644 --- a/tensorboard/plugins/debugger_v2/BUILD +++ b/tensorboard/plugins/debugger_v2/BUILD @@ -63,6 +63,5 @@ py_test( "//tensorboard:expect_tensorflow_installed", "//tensorboard/backend:application", "//tensorboard/plugins:base_plugin", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/distribution/BUILD b/tensorboard/plugins/distribution/BUILD index c9a1d50a31..c38f6f92c7 100644 --- 
a/tensorboard/plugins/distribution/BUILD +++ b/tensorboard/plugins/distribution/BUILD @@ -40,7 +40,6 @@ py_test( "//tensorboard/plugins/histogram:summary", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -65,7 +64,6 @@ py_test( "//tensorboard/plugins/histogram:summary", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/graph/BUILD b/tensorboard/plugins/graph/BUILD index 8c38414b6e..d881abe9dd 100644 --- a/tensorboard/plugins/graph/BUILD +++ b/tensorboard/plugins/graph/BUILD @@ -49,7 +49,6 @@ py_library( "//tensorboard/util:test_util", "@com_google_protobuf//:protobuf_python", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -87,7 +86,6 @@ py_test( "//tensorboard/util:test_util", "@com_google_protobuf//:protobuf_python", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/histogram/BUILD b/tensorboard/plugins/histogram/BUILD index 1f8bd3d0c4..c817978b79 100644 --- a/tensorboard/plugins/histogram/BUILD +++ b/tensorboard/plugins/histogram/BUILD @@ -20,7 +20,6 @@ py_library( "//tensorboard/data:provider", "//tensorboard/plugins:base_plugin", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -43,7 +42,6 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -68,7 +66,6 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -125,7 +122,6 @@ py_test( "//tensorboard:expect_tensorflow_installed", "//tensorboard/compat/proto:protos_all_py_pb2", "//tensorboard/util:tensor_util", - "@org_pythonhosted_six", ], ) @@ -136,7 +132,6 @@ py_binary( deps = [ ":summary", "//tensorboard:expect_tensorflow_installed", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/hparams/BUILD b/tensorboard/plugins/hparams/BUILD index e1a00c29f7..3b89a77ef9 
100644 --- a/tensorboard/plugins/hparams/BUILD +++ b/tensorboard/plugins/hparams/BUILD @@ -48,7 +48,6 @@ py_library( "//tensorboard/util:tensor_util", "@com_google_protobuf//:protobuf_python", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -129,7 +128,6 @@ py_binary( "//tensorboard:expect_numpy_installed", "//tensorboard:expect_tensorflow_installed", "//tensorboard/plugins/scalar:summary", - "@org_pythonhosted_six", ], ) @@ -144,7 +142,6 @@ py_binary( "//tensorboard:expect_absl_flags_installed", "//tensorboard:expect_tensorflow_installed", "//tensorboard/plugins/scalar:summary", - "@org_pythonhosted_six", ], ) @@ -159,7 +156,6 @@ py_binary( "//tensorboard:expect_absl_flags_installed", "//tensorboard:expect_tensorflow_installed", "@com_google_protobuf//:protobuf_python", - "@org_pythonhosted_six", ], ) @@ -222,7 +218,6 @@ py_test( ":summary_v2", "//tensorboard:expect_tensorflow_installed", "@com_google_protobuf//:protobuf_python", - "@org_pythonhosted_six", ], ) @@ -236,7 +231,6 @@ py_library( "//tensorboard:expect_numpy_installed", "//tensorboard/compat", "//tensorboard/compat/proto:protos_all_py_pb2", - "@org_pythonhosted_six", ], ) @@ -254,7 +248,6 @@ py_test( "//tensorboard:test", "//tensorboard/compat/proto:protos_all_py_pb2", "@com_google_protobuf//:protobuf_python", - "@org_pythonhosted_six", ], ) @@ -274,7 +267,6 @@ py_test( "//tensorboard/compat:no_tensorflow", "//tensorboard/compat/proto:protos_all_py_pb2", "@com_google_protobuf//:protobuf_python", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/image/BUILD b/tensorboard/plugins/image/BUILD index f37f032702..237cb45ee1 100644 --- a/tensorboard/plugins/image/BUILD +++ b/tensorboard/plugins/image/BUILD @@ -18,7 +18,6 @@ py_library( "//tensorboard/backend:http_util", "//tensorboard/plugins:base_plugin", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -32,7 +31,6 @@ py_binary( "//tensorboard:expect_tensorflow_installed", "//tensorboard/compat/proto:protos_all_py_pb2", 
"//tensorboard/util:tb_logging", - "@org_pythonhosted_six", ], ) @@ -52,7 +50,6 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -75,7 +72,6 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/mesh/BUILD b/tensorboard/plugins/mesh/BUILD index 755b9369c1..014fecb975 100644 --- a/tensorboard/plugins/mesh/BUILD +++ b/tensorboard/plugins/mesh/BUILD @@ -33,7 +33,6 @@ py_test( deps = [ ":metadata", "//tensorboard:expect_tensorflow_installed", - "@org_pythonhosted_six", ], ) @@ -50,7 +49,6 @@ py_library( "//tensorboard/plugins:base_plugin", "//tensorboard/util:tensor_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -82,7 +80,6 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/pr_curve/BUILD b/tensorboard/plugins/pr_curve/BUILD index 669f664de3..645f28f3c9 100644 --- a/tensorboard/plugins/pr_curve/BUILD +++ b/tensorboard/plugins/pr_curve/BUILD @@ -31,7 +31,6 @@ py_library( "//tensorboard/data:provider", "//tensorboard/plugins:base_plugin", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -51,7 +50,6 @@ py_test( "//tensorboard/backend/event_processing:event_multiplexer", "//tensorboard/plugins:base_plugin", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -74,7 +72,6 @@ py_test( "//tensorboard/compat:no_tensorflow", "//tensorboard/plugins:base_plugin", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -104,7 +101,6 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -128,6 +124,5 @@ py_library( deps = [ ":summary", "//tensorboard:expect_tensorflow_installed", - "@org_pythonhosted_six", ], ) diff --git 
a/tensorboard/plugins/projector/BUILD b/tensorboard/plugins/projector/BUILD index 82c79b6287..70bd3fc675 100644 --- a/tensorboard/plugins/projector/BUILD +++ b/tensorboard/plugins/projector/BUILD @@ -49,7 +49,6 @@ py_test( ":projector", "//tensorboard:expect_tensorflow_installed", "//tensorboard/util:test_util", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/scalar/BUILD b/tensorboard/plugins/scalar/BUILD index 139de9bd41..2017f8d7bf 100644 --- a/tensorboard/plugins/scalar/BUILD +++ b/tensorboard/plugins/scalar/BUILD @@ -20,7 +20,6 @@ py_library( "//tensorboard/data:provider", "//tensorboard/plugins:base_plugin", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -42,7 +41,6 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -66,7 +64,6 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -77,7 +74,6 @@ py_binary( deps = [ ":summary", "//tensorboard:expect_tensorflow_installed", - "@org_pythonhosted_six", ], ) @@ -88,7 +84,6 @@ py_binary( deps = [ ":summary", "//tensorboard:expect_tensorflow_installed", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/text/BUILD b/tensorboard/plugins/text/BUILD index cf7b12356e..80b9a0c1c5 100644 --- a/tensorboard/plugins/text/BUILD +++ b/tensorboard/plugins/text/BUILD @@ -21,7 +21,6 @@ py_library( "@org_mozilla_bleach", "@org_pocoo_werkzeug", "@org_pythonhosted_markdown", - "@org_pythonhosted_six", ], ) @@ -41,7 +40,6 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -63,7 +61,6 @@ py_test( "//tensorboard/plugins:base_plugin", "//tensorboard/util:test_util", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) @@ -119,7 +116,6 @@ py_test( "//tensorboard:expect_tensorflow_installed", "//tensorboard/compat/proto:protos_all_py_pb2", 
"//tensorboard/util:tensor_util", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/plugins/text_v2/BUILD b/tensorboard/plugins/text_v2/BUILD index 0c3fe5b287..5eefdb9b04 100644 --- a/tensorboard/plugins/text_v2/BUILD +++ b/tensorboard/plugins/text_v2/BUILD @@ -19,7 +19,6 @@ py_library( "//tensorboard/plugins:base_plugin", "//tensorboard/plugins/text:metadata", "@org_pocoo_werkzeug", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/scripts/BUILD b/tensorboard/scripts/BUILD index fbb566a18d..b3f2933cca 100644 --- a/tensorboard/scripts/BUILD +++ b/tensorboard/scripts/BUILD @@ -12,7 +12,6 @@ py_binary( deps = [ "//tensorboard:expect_numpy_installed", "//tensorboard:expect_tensorflow_installed", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/summary/writer/BUILD b/tensorboard/summary/writer/BUILD index 67db27f901..76fa13b28b 100644 --- a/tensorboard/summary/writer/BUILD +++ b/tensorboard/summary/writer/BUILD @@ -15,7 +15,6 @@ py_library( deps = [ "//tensorboard/compat:tensorflow", "//tensorboard/compat/proto:protos_all_py_pb2", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/uploader/BUILD b/tensorboard/uploader/BUILD index 28134e3033..2809247fb4 100644 --- a/tensorboard/uploader/BUILD +++ b/tensorboard/uploader/BUILD @@ -78,7 +78,6 @@ py_library( "//tensorboard:program", "//tensorboard/plugins:base_plugin", "//tensorboard/uploader/proto:protos_all_py_pb2_grpc", - "@org_pythonhosted_six", ], ) @@ -115,7 +114,6 @@ py_library( "//tensorboard/util:tb_logging", "//tensorboard/util:tensor_util", "@com_google_protobuf//:protobuf_python", - "@org_pythonhosted_six", ], ) @@ -229,7 +227,6 @@ py_test( "//tensorboard/backend/event_processing:event_file_loader", "//tensorboard/backend/event_processing:io_wrapper", "//tensorboard/util:test_util", - "@org_pythonhosted_six", ], ) diff --git a/tensorboard/util/BUILD b/tensorboard/util/BUILD index a5eef81506..9fe694893e 100644 --- a/tensorboard/util/BUILD +++ b/tensorboard/util/BUILD @@ -24,7 +24,6 @@ 
py_test( ":encoder", "//tensorboard:expect_numpy_installed", "//tensorboard:expect_tensorflow_installed", - "@org_pythonhosted_six", ], ) @@ -53,7 +52,6 @@ py_test( "//tensorboard:expect_grpc_installed", "//tensorboard:test", "//tensorboard:version", - "@org_pythonhosted_six", ], )