diff --git a/0001-skia-Some-includes-to-fix-build-with-GCC-12.patch b/0001-skia-Some-includes-to-fix-build-with-GCC-12.patch new file mode 100644 index 0000000000000000000000000000000000000000..f528af0190543764cbe69352b40e52dd3d35571b --- /dev/null +++ b/0001-skia-Some-includes-to-fix-build-with-GCC-12.patch @@ -0,0 +1,41 @@ +From 68799a1e0815b20ca59ce354a55280399257a201 Mon Sep 17 00:00:00 2001 +From: Fabian Vogt <fvogt@suse.de> +Date: Fri, 25 Mar 2022 15:29:28 +0100 +Subject: [PATCH] skia: Some includes to fix build with GCC 12 + +Those includes got introduced upstream for other reasons and fixed building +with GCC 12 as a side effect. +--- + src/3rdparty/chromium/third_party/skia/include/core/SkColor.h | 2 ++ + src/3rdparty/chromium/third_party/skia/src/utils/SkParseColor.cpp | 2 ++ + 2 files changed, 4 insertions(+) + +diff --git a/src/3rdparty/chromium/third_party/skia/include/core/SkColor.h b/src/3rdparty/chromium/third_party/skia/include/core/SkColor.h +index 36527e38e53..f77c24ade82 100644 +--- a/src/3rdparty/chromium/third_party/skia/include/core/SkColor.h ++++ b/src/3rdparty/chromium/third_party/skia/include/core/SkColor.h +@@ -12,6 +12,8 @@ + #include "include/core/SkScalar.h" + #include "include/core/SkTypes.h" + ++#include <cstdint> ++ + /** \file SkColor.h + + Types, consts, functions, and macros for colors. 
+diff --git a/src/3rdparty/chromium/third_party/skia/src/utils/SkParseColor.cpp b/src/3rdparty/chromium/third_party/skia/src/utils/SkParseColor.cpp +index 7260365b2c6..3164650728e 100644 +--- a/src/3rdparty/chromium/third_party/skia/src/utils/SkParseColor.cpp ++++ b/src/3rdparty/chromium/third_party/skia/src/utils/SkParseColor.cpp +@@ -8,6 +8,8 @@ + + #include "include/utils/SkParse.h" + ++#include <algorithm> // std::lower_bound ++ + static constexpr const char* gColorNames[] = { + "aliceblue", + "antiquewhite", +-- +2.34.1 + diff --git a/armv6-ffmpeg-no-thumb.patch b/armv6-ffmpeg-no-thumb.patch new file mode 100644 index 0000000000000000000000000000000000000000..bdde7b5d6143e4ca4cf6a3747c16c3be98960c47 --- /dev/null +++ b/armv6-ffmpeg-no-thumb.patch @@ -0,0 +1,13 @@ +Index: qtwebengine-everywhere-src-5.11.0-alpha/src/3rdparty/chromium/third_party/ffmpeg/chromium/config/Chromium/linux/arm/config.h +=================================================================== +--- qtwebengine-everywhere-src-5.11.0-alpha.orig/src/3rdparty/chromium/third_party/ffmpeg/chromium/config/Chromium/linux/arm/config.h ++++ qtwebengine-everywhere-src-5.11.0-alpha/src/3rdparty/chromium/third_party/ffmpeg/chromium/config/Chromium/linux/arm/config.h +@@ -566,7 +566,7 @@ + #define CONFIG_NEON_CLOBBER_TEST 0 + #define CONFIG_OSSFUZZ 0 + #define CONFIG_PIC 1 +-#define CONFIG_THUMB 1 ++#define CONFIG_THUMB 0 + #define CONFIG_VALGRIND_BACKTRACE 0 + #define CONFIG_XMM_CLOBBER_TEST 0 + #define CONFIG_BSFS 1 diff --git a/disable-gpu-when-using-nouveau-boo-1005323.patch b/disable-gpu-when-using-nouveau-boo-1005323.patch new file mode 100644 index 0000000000000000000000000000000000000000..7ebb540ec1eb867f322ac4ec941c6f0dd023a260 --- /dev/null +++ b/disable-gpu-when-using-nouveau-boo-1005323.patch @@ -0,0 +1,94 @@ +From: Antonio Larrosa <alarrosa@suse.com> +Subject: Disable GPU when using nouveau or running on wayland +References: boo#1005323, boo#1060990 + +Qt WebEngine uses multi-threaded OpenGL, which nouveau does not support. 
+It also crashes when running on wayland, the cause is not yet known. +Work around these issues by not doing GPU-accelerated rendering in such +cases. + +Index: qtwebengine-everywhere-src-5.15.1/src/core/web_engine_context.cpp +=================================================================== +--- qtwebengine-everywhere-src-5.15.1.orig/src/core/web_engine_context.cpp ++++ qtwebengine-everywhere-src-5.15.1/src/core/web_engine_context.cpp +@@ -127,6 +127,7 @@ + #include + #if QT_CONFIG(opengl) + # include ++ # include <QOffscreenSurface> + #endif + #include + #include +@@ -186,6 +187,39 @@ void dummyGetPluginCallback(const std::v + } + #endif + ++#ifndef QT_NO_OPENGL ++QString openGLVendor() ++{ ++ QString vendor; ++ ++ QOpenGLContext *oldContext = QOpenGLContext::currentContext(); ++ QSurface *oldSurface = 0; ++ if (oldContext) ++ oldSurface = oldContext->surface(); ++ ++ QScopedPointer<QOffscreenSurface> surface( new QOffscreenSurface ); ++ surface->create(); ++ QOpenGLContext context; ++ if (!context.create()) { ++ qDebug() << "Error creating openGL context"; ++ } ++ else if (!context.makeCurrent(surface.data())) { ++ qDebug() << "Error making openGL context current context"; ++ } else { ++ const GLubyte *p; ++ QOpenGLFunctions *f = context.functions(); ++ if ((p = f->glGetString(GL_VENDOR))) ++ vendor = QString::fromLatin1(reinterpret_cast<const char *>(p)); ++ } ++ ++ context.doneCurrent(); ++ if (oldContext && oldSurface) ++ oldContext->makeCurrent(oldSurface); ++ ++ return vendor; ++} ++#endif ++ + } // namespace + + namespace QtWebEngineCore { +@@ -697,10 +731,31 @@ WebEngineContext::WebEngineContext() + const char *glType = 0; + #if QT_CONFIG(opengl) + ++ bool disableGpu = qEnvironmentVariableIsSet("QT_WEBENGINE_DISABLE_GPU"); ++ ++ if (!qEnvironmentVariableIsSet("QT_WEBENGINE_DISABLE_WAYLAND_WORKAROUND") && qApp->platformName().startsWith("wayland", Qt::CaseInsensitive)) ++ { ++ qWarning() << "Running on wayland. 
Qt WebEngine will disable usage of the GPU.\n" ++ "Note: you can set the QT_WEBENGINE_DISABLE_WAYLAND_WORKAROUND\n" ++ "environment variable before running this application, but this is \n" ++ "not recommended since this usually causes applications to crash."; ++ disableGpu = true; ++ } ++ ++ if (!qEnvironmentVariableIsSet("QT_WEBENGINE_DISABLE_NOUVEAU_WORKAROUND") && openGLVendor() == QStringLiteral("nouveau")) ++ { ++ qWarning() << "Nouveau openGL driver detected. Qt WebEngine will disable usage of the GPU.\n" ++ "Note: you can set the QT_WEBENGINE_DISABLE_NOUVEAU_WORKAROUND\n" ++ "environment variable before running this application, but this is \n" ++ "not recommended since this usually causes applications to crash as\n" ++ "Nouveau openGL drivers don't support multithreaded rendering"; ++ disableGpu = true; ++ } ++ + const bool tryGL = (usingDefaultSGBackend() && !usingSoftwareDynamicGL() && + QGuiApplicationPrivate::platformIntegration()->hasCapability(QPlatformIntegration::OpenGL)) + || enableGLSoftwareRendering; +- if (tryGL) { ++ if (tryGL && !disableGpu) { + if (qt_gl_global_share_context() && qt_gl_global_share_context()->isValid()) { + // If the native handle is QEGLNativeContext try to use GL ES/2. + // If there is no native handle, assume we are using wayland and try GL ES/2. 
diff --git a/python3.patch b/python3.patch new file mode 100644 index 0000000000000000000000000000000000000000..565cc965b69965b1000dff8bd7fd3a79d9a1e0a9 --- /dev/null +++ b/python3.patch @@ -0,0 +1,113545 @@ +diff --git a/configure.pri b/configure.pri +index 8705ad93f..3977c959e 100644 +--- a/configure.pri ++++ b/configure.pri +@@ -7,20 +7,7 @@ QTWEBENGINE_SOURCE_TREE = $$PWD + equals(QMAKE_HOST.os, Windows): EXE_SUFFIX = .exe + + defineTest(isPythonVersionSupported) { +- python = $$system_quote($$system_path($$1)) +- python_version = $$system('$$python -c "import sys; print(sys.version_info[0:3])"') +- python_version ~= s/[()]//g +- python_version = $$split(python_version, ',') +- python_major_version = $$first(python_version) +- greaterThan(python_major_version, 2) { +- qtLog("Python version 3 is not supported by Chromium.") +- return(false) +- } +- python_minor_version = $$member(python_version, 1) +- python_patch_version = $$member(python_version, 2) +- greaterThan(python_major_version, 1): greaterThan(python_minor_version, 6): greaterThan(python_patch_version, 4): return(true) +- qtLog("Unsupported python version: $${python_major_version}.$${python_minor_version}.$${python_patch_version}.") +- return(false) ++ return(true) + } + + defineTest(qtConfTest_detectJumboBuild) { +@@ -52,10 +39,10 @@ defineTest(qtConfReport_jumboBuild) { + qtConfReportPadded($${1}, $$mergeLimit) + } + +-defineTest(qtConfTest_detectPython2) { +- python = $$qtConfFindInPath("python2$$EXE_SUFFIX") ++defineTest(qtConfTest_detectPython3) { ++ python = $$qtConfFindInPath("python3$$EXE_SUFFIX") + isEmpty(python) { +- qtLog("'python2$$EXE_SUFFIX' not found in PATH. Checking for 'python$$EXE_SUFFIX'.") ++ qtLog("'python3$$EXE_SUFFIX' not found in PATH. 
Checking for 'python$$EXE_SUFFIX'.") + python = $$qtConfFindInPath("python$$EXE_SUFFIX") + } + isEmpty(python) { +@@ -67,7 +54,7 @@ defineTest(qtConfTest_detectPython2) { + return(false) + } + +- # Make tests.python2.location available in configure.json. ++ # Make tests.python3.location available in configure.json. + $${1}.location = $$clean_path($$python) + export($${1}.location) + $${1}.cache += location +diff --git a/mkspecs/features/functions.prf b/mkspecs/features/functions.prf +index 2750d7071..908e6dcd4 100644 +--- a/mkspecs/features/functions.prf ++++ b/mkspecs/features/functions.prf +@@ -39,11 +39,11 @@ defineReplace(which) { + + # Returns the unquoted path to the python executable. + defineReplace(pythonPath) { +- isEmpty(QMAKE_PYTHON2) { ++ isEmpty(QMAKE_PYTHON3) { + # Fallback for building QtWebEngine with Qt < 5.8 +- QMAKE_PYTHON2 = python ++ QMAKE_PYTHON3 = python + } +- return($$QMAKE_PYTHON2) ++ return($$QMAKE_PYTHON3) + } + + # Returns the python executable for use with shell / make targets. +diff --git a/src/3rdparty/chromium/PRESUBMIT.py b/src/3rdparty/chromium/PRESUBMIT.py +index c66349a31..137c2e37a 100644 +--- a/src/3rdparty/chromium/PRESUBMIT.py ++++ b/src/3rdparty/chromium/PRESUBMIT.py +@@ -1987,7 +1987,7 @@ def CheckNoDeprecatedMojoTypes(input_api, output_api): + file_filter = lambda f: f.LocalPath().endswith(('.cc', '.mm', '.h')) + for f in input_api.AffectedFiles(file_filter=file_filter): + # Don't check //components/arc, not yet migrated (see crrev.com/c/1868870). +- if any(map(lambda path: f.LocalPath().startswith(path), ok_paths)): ++ if any([f.LocalPath().startswith(path) for path in ok_paths]): + continue + + for line_num, line in f.ChangedContents(): +@@ -1997,7 +1997,7 @@ def CheckNoDeprecatedMojoTypes(input_api, output_api): + + if problems: + # Raise errors inside |error_paths| and warnings everywhere else. 
+- if any(map(lambda path: f.LocalPath().startswith(path), error_paths)): ++ if any([f.LocalPath().startswith(path) for path in error_paths]): + errors.extend(problems) + else: + warnings.extend(problems) +@@ -2336,7 +2336,7 @@ def _ExtractAddRulesFromParsedDeps(parsed_deps): + if rule.startswith('+') or rule.startswith('!') + ]) + for _, rules in parsed_deps.get('specific_include_rules', +- {}).iteritems(): ++ {}).items(): + add_rules.update([ + rule[1:] for rule in rules + if rule.startswith('+') or rule.startswith('!') +@@ -2364,7 +2364,7 @@ def _ParseDeps(contents): + 'Var': _VarImpl(local_scope).Lookup, + 'Str': str, + } +- exec contents in global_scope, local_scope ++ exec(contents, global_scope, local_scope) + return local_scope + + +@@ -3062,11 +3062,11 @@ def CheckSecurityOwners(input_api, output_api): + + # Go through the OWNERS files to check, filtering out rules that are already + # present in that OWNERS file. +- for owners_file, patterns in to_check.iteritems(): ++ for owners_file, patterns in to_check.items(): + try: + with file(owners_file) as f: + lines = set(f.read().splitlines()) +- for entry in patterns.itervalues(): ++ for entry in patterns.values(): + entry['rules'] = [rule for rule in entry['rules'] if rule not in lines + ] + except IOError: +@@ -3075,10 +3075,10 @@ def CheckSecurityOwners(input_api, output_api): + + # All the remaining lines weren't found in OWNERS files, so emit an error. 
+ errors = [] +- for owners_file, patterns in to_check.iteritems(): ++ for owners_file, patterns in to_check.items(): + missing_lines = [] + files = [] +- for _, entry in patterns.iteritems(): ++ for _, entry in patterns.items(): + missing_lines.extend(entry['rules']) + files.extend([' %s' % f.LocalPath() for f in entry['files']]) + if missing_lines: +@@ -3118,7 +3118,7 @@ def _GetFilesUsingSecurityCriticalFunctions(input_api): + } + _PATTERNS_TO_CHECK = { + k: input_api.re.compile(v) +- for k, v in _PATTERNS_TO_CHECK.items() ++ for k, v in list(_PATTERNS_TO_CHECK.items()) + } + + # Scan all affected files for changes touching _FUNCTIONS_TO_CHECK. +@@ -3131,7 +3131,7 @@ def _GetFilesUsingSecurityCriticalFunctions(input_api): + # as adding or changing the arguments. + if line.startswith('-') or (line.startswith('+') and + not line.startswith('++')): +- for name, pattern in _PATTERNS_TO_CHECK.items(): ++ for name, pattern in list(_PATTERNS_TO_CHECK.items()): + if pattern.search(line): + path = f.LocalPath() + if not path in files_to_functions: +@@ -3161,7 +3161,7 @@ def CheckSecurityChanges(input_api, output_api): + if not has_security_owner: + msg = 'The following files change calls to security-sensive functions\n' \ + 'that need to be reviewed by {}.\n'.format(owners_file) +- for path, names in files_to_functions.items(): ++ for path, names in list(files_to_functions.items()): + msg += ' {}\n'.format(path) + for name in names: + msg += ' {}\n'.format(name) +@@ -3883,7 +3883,7 @@ def CheckForRelativeIncludes(input_api, output_api): + return [] + + error_descriptions = [] +- for file_path, bad_lines in bad_files.iteritems(): ++ for file_path, bad_lines in bad_files.items(): + error_description = file_path + for line in bad_lines: + error_description += '\n ' + line +@@ -4845,8 +4845,8 @@ def CheckStrings(input_api, output_api): + git_footers = input_api.change.GitFootersFromDescription() + skip_screenshot_check_footer = [ + footer.lower() +- for footer in 
git_footers.get(u'Skip-Translation-Screenshots-Check', [])] +- run_screenshot_check = u'true' not in skip_screenshot_check_footer ++ for footer in git_footers.get('Skip-Translation-Screenshots-Check', [])] ++ run_screenshot_check = 'true' not in skip_screenshot_check_footer + + import os + import re +@@ -5070,18 +5070,18 @@ def CheckStrings(input_api, output_api): + if file_path.endswith('.grdp'): + if f.OldContents(): + old_id_to_msg_map = grd_helper.GetGrdpMessagesFromString( +- unicode('\n'.join(f.OldContents()))) ++ str('\n'.join(f.OldContents()))) + if f.NewContents(): + new_id_to_msg_map = grd_helper.GetGrdpMessagesFromString( +- unicode('\n'.join(f.NewContents()))) ++ str('\n'.join(f.NewContents()))) + else: + file_dir = input_api.os_path.dirname(file_path) or '.' + if f.OldContents(): + old_id_to_msg_map = grd_helper.GetGrdMessages( +- StringIO(unicode('\n'.join(f.OldContents()))), file_dir) ++ StringIO(str('\n'.join(f.OldContents()))), file_dir) + if f.NewContents(): + new_id_to_msg_map = grd_helper.GetGrdMessages( +- StringIO(unicode('\n'.join(f.NewContents()))), file_dir) ++ StringIO(str('\n'.join(f.NewContents()))), file_dir) + + grd_name, ext = input_api.os_path.splitext( + input_api.os_path.basename(file_path)) +diff --git a/src/3rdparty/chromium/PRESUBMIT_test.py b/src/3rdparty/chromium/PRESUBMIT_test.py +index 5c52d0899..853e97f0c 100755 +--- a/src/3rdparty/chromium/PRESUBMIT_test.py ++++ b/src/3rdparty/chromium/PRESUBMIT_test.py +@@ -669,7 +669,7 @@ class TryServerMasterTest(unittest.TestCase): + 'win_rel_naclmore', + ], + } +- for master, bots in bots.iteritems(): ++ for master, bots in bots.items(): + for bot in bots: + self.assertEqual(master, PRESUBMIT.GetTryServerMasterForBot(bot), + 'bot=%s: expected %s, computed %s' % ( +@@ -2254,9 +2254,9 @@ class SecurityChangeTest(unittest.TestCase): + self._mockChangeOwnerAndReviewers( + mock_input_api, 'owner@chromium.org', ['banana@chromium.org']) + result = 
PRESUBMIT.CheckSecurityChanges(mock_input_api, mock_output_api) +- self.assertEquals(1, len(result)) +- self.assertEquals(result[0].type, 'notify') +- self.assertEquals(result[0].message, ++ self.assertEqual(1, len(result)) ++ self.assertEqual(result[0].type, 'notify') ++ self.assertEqual(result[0].message, + 'The following files change calls to security-sensive functions\n' \ + 'that need to be reviewed by ipc/SECURITY_OWNERS.\n' + ' file.cc\n' +@@ -2273,9 +2273,9 @@ class SecurityChangeTest(unittest.TestCase): + self._mockChangeOwnerAndReviewers( + mock_input_api, 'owner@chromium.org', ['banana@chromium.org']) + result = PRESUBMIT.CheckSecurityChanges(mock_input_api, mock_output_api) +- self.assertEquals(1, len(result)) +- self.assertEquals(result[0].type, 'error') +- self.assertEquals(result[0].message, ++ self.assertEqual(1, len(result)) ++ self.assertEqual(result[0].type, 'error') ++ self.assertEqual(result[0].message, + 'The following files change calls to security-sensive functions\n' \ + 'that need to be reviewed by ipc/SECURITY_OWNERS.\n' + ' file.cc\n' +@@ -2292,7 +2292,7 @@ class SecurityChangeTest(unittest.TestCase): + mock_input_api, 'owner@chromium.org', + ['apple@chromium.org', 'banana@chromium.org']) + result = PRESUBMIT.CheckSecurityChanges(mock_input_api, mock_output_api) +- self.assertEquals(0, len(result)) ++ self.assertEqual(0, len(result)) + + def testChangeOwnerIsSecurityOwner(self): + mock_input_api = MockInputApi() +@@ -2304,7 +2304,7 @@ class SecurityChangeTest(unittest.TestCase): + self._mockChangeOwnerAndReviewers( + mock_input_api, 'orange@chromium.org', ['pear@chromium.org']) + result = PRESUBMIT.CheckSecurityChanges(mock_input_api, mock_output_api) +- self.assertEquals(1, len(result)) ++ self.assertEqual(1, len(result)) + + + class BannedTypeCheckTest(unittest.TestCase): +@@ -2726,8 +2726,8 @@ class CheckNoDirectIncludesHeadersWhichRedefineStrCat(unittest.TestCase): + MockFile('dir/jumbo.h', ['#include "sphelper.h"']), + ] + results = 
PRESUBMIT._CheckNoStrCatRedefines(mock_input_api, MockOutputApi()) +- self.assertEquals(1, len(results)) +- self.assertEquals(4, len(results[0].items)) ++ self.assertEqual(1, len(results)) ++ self.assertEqual(4, len(results[0].items)) + self.assertTrue('StrCat' in results[0].message) + self.assertTrue('foo_win.cc' in results[0].items[0]) + self.assertTrue('bar.h' in results[0].items[1]) +@@ -2741,7 +2741,7 @@ class CheckNoDirectIncludesHeadersWhichRedefineStrCat(unittest.TestCase): + MockFile('dir/baz-win.h', ['#include "base/win/atl.h"']), + ] + results = PRESUBMIT._CheckNoStrCatRedefines(mock_input_api, MockOutputApi()) +- self.assertEquals(0, len(results)) ++ self.assertEqual(0, len(results)) + + def testAllowsToCreateWrapper(self): + mock_input_api = MockInputApi() +@@ -2751,7 +2751,7 @@ class CheckNoDirectIncludesHeadersWhichRedefineStrCat(unittest.TestCase): + '#include "base/win/windows_defines.inc"']), + ] + results = PRESUBMIT._CheckNoStrCatRedefines(mock_input_api, MockOutputApi()) +- self.assertEquals(0, len(results)) ++ self.assertEqual(0, len(results)) + + + class StringTest(unittest.TestCase): +@@ -3523,7 +3523,7 @@ class BuildtoolsRevisionsAreInSyncTest(unittest.TestCase): + def _check(self, files): + mock_input_api = MockInputApi() + mock_input_api.files = [] +- for fname, contents in files.items(): ++ for fname, contents in list(files.items()): + mock_input_api.files.append(MockFile(fname, contents.splitlines())) + return PRESUBMIT.CheckBuildtoolsRevisionsAreInSync(mock_input_api, + MockOutputApi()) +@@ -3560,7 +3560,7 @@ class CheckFuzzTargetsTest(unittest.TestCase): + def _check(self, files): + mock_input_api = MockInputApi() + mock_input_api.files = [] +- for fname, contents in files.items(): ++ for fname, contents in list(files.items()): + mock_input_api.files.append(MockFile(fname, contents.splitlines())) + return PRESUBMIT.CheckFuzzTargetsOnUpload(mock_input_api, MockOutputApi()) + +diff --git a/src/3rdparty/chromium/PRESUBMIT_test_mocks.py 
b/src/3rdparty/chromium/PRESUBMIT_test_mocks.py +index 0a9e5a54e..84fe1cb5f 100644 +--- a/src/3rdparty/chromium/PRESUBMIT_test_mocks.py ++++ b/src/3rdparty/chromium/PRESUBMIT_test_mocks.py +@@ -126,7 +126,7 @@ class MockInputApi(object): + if file_.LocalPath() == filename: + return '\n'.join(file_.NewContents()) + # Otherwise, file is not in our mock API. +- raise IOError, "No such file or directory: '%s'" % filename ++ raise IOError("No such file or directory: '%s'" % filename) + + + class MockOutputApi(object): +diff --git a/src/3rdparty/chromium/base/third_party/libevent/event_rpcgen.py b/src/3rdparty/chromium/base/third_party/libevent/event_rpcgen.py +index 4ec77a6f6..c83505ad4 100755 +--- a/src/3rdparty/chromium/base/third_party/libevent/event_rpcgen.py ++++ b/src/3rdparty/chromium/base/third_party/libevent/event_rpcgen.py +@@ -27,18 +27,18 @@ class Struct: + self._name = name + self._entries = [] + self._tags = {} +- print >>sys.stderr, ' Created struct: %s' % name ++ print(' Created struct: %s' % name, file=sys.stderr) + + def AddEntry(self, entry): +- if self._tags.has_key(entry.Tag()): +- print >>sys.stderr, ( 'Entry "%s" duplicates tag number ' ++ if entry.Tag() in self._tags: ++ print(( 'Entry "%s" duplicates tag number ' + '%d from "%s" around line %d' ) % ( + entry.Name(), entry.Tag(), +- self._tags[entry.Tag()], line_count) ++ self._tags[entry.Tag()], line_count), file=sys.stderr) + sys.exit(1) + self._entries.append(entry) + self._tags[entry.Tag()] = entry.Name() +- print >>sys.stderr, ' Added entry: %s' % entry.Name() ++ print(' Added entry: %s' % entry.Name(), file=sys.stderr) + + def Name(self): + return self._name +@@ -52,24 +52,24 @@ class Struct: + def PrintIdented(self, file, ident, code): + """Takes an array, add indentation to each entry and prints it.""" + for entry in code: +- print >>file, '%s%s' % (ident, entry) ++ print('%s%s' % (ident, entry), file=file) + + def PrintTags(self, file): + """Prints the tag definitions for a structure.""" 
+- print >>file, '/* Tag definition for %s */' % self._name +- print >>file, 'enum %s_ {' % self._name.lower() ++ print('/* Tag definition for %s */' % self._name, file=file) ++ print('enum %s_ {' % self._name.lower(), file=file) + for entry in self._entries: +- print >>file, ' %s=%d,' % (self.EntryTagName(entry), +- entry.Tag()) +- print >>file, ' %s_MAX_TAGS' % (self._name.upper()) +- print >>file, '};\n' ++ print(' %s=%d,' % (self.EntryTagName(entry), ++ entry.Tag()), file=file) ++ print(' %s_MAX_TAGS' % (self._name.upper()), file=file) ++ print('};\n', file=file) + + def PrintForwardDeclaration(self, file): +- print >>file, 'struct %s;' % self._name ++ print('struct %s;' % self._name, file=file) + + def PrintDeclaration(self, file): +- print >>file, '/* Structure declaration for %s */' % self._name +- print >>file, 'struct %s_access_ {' % self._name ++ print('/* Structure declaration for %s */' % self._name, file=file) ++ print('struct %s_access_ {' % self._name, file=file) + for entry in self._entries: + dcl = entry.AssignDeclaration('(*%s_assign)' % entry.Name()) + dcl.extend( +@@ -78,20 +78,19 @@ class Struct: + dcl.extend( + entry.AddDeclaration('(*%s_add)' % entry.Name())) + self.PrintIdented(file, ' ', dcl) +- print >>file, '};\n' ++ print('};\n', file=file) + +- print >>file, 'struct %s {' % self._name +- print >>file, ' struct %s_access_ *base;\n' % self._name ++ print('struct %s {' % self._name, file=file) ++ print(' struct %s_access_ *base;\n' % self._name, file=file) + for entry in self._entries: + dcl = entry.Declaration() + self.PrintIdented(file, ' ', dcl) +- print >>file, '' ++ print('', file=file) + for entry in self._entries: +- print >>file, ' ev_uint8_t %s_set;' % entry.Name() +- print >>file, '};\n' ++ print(' ev_uint8_t %s_set;' % entry.Name(), file=file) ++ print('};\n', file=file) + +- print >>file, \ +-"""struct %(name)s *%(name)s_new(void); ++ print("""struct %(name)s *%(name)s_new(void); + void %(name)s_free(struct %(name)s *); + void 
%(name)s_clear(struct %(name)s *); + void %(name)s_marshal(struct evbuffer *, const struct %(name)s *); +@@ -100,7 +99,7 @@ int %(name)s_complete(struct %(name)s *); + void evtag_marshal_%(name)s(struct evbuffer *, ev_uint32_t, + const struct %(name)s *); + int evtag_unmarshal_%(name)s(struct evbuffer *, ev_uint32_t, +- struct %(name)s *);""" % { 'name' : self._name } ++ struct %(name)s *);""" % { 'name' : self._name }, file=file) + + + # Write a setting function of every variable +@@ -113,22 +112,21 @@ int evtag_unmarshal_%(name)s(struct evbuffer *, ev_uint32_t, + self.PrintIdented(file, '', entry.AddDeclaration( + entry.AddFuncName())) + +- print >>file, '/* --- %s done --- */\n' % self._name ++ print('/* --- %s done --- */\n' % self._name, file=file) + + def PrintCode(self, file): +- print >>file, ('/*\n' ++ print(('/*\n' + ' * Implementation of %s\n' +- ' */\n') % self._name ++ ' */\n') % self._name, file=file) + +- print >>file, \ +- 'static struct %(name)s_access_ __%(name)s_base = {' % \ +- { 'name' : self._name } ++ print('static struct %(name)s_access_ __%(name)s_base = {' % \ ++ { 'name' : self._name }, file=file) + for entry in self._entries: + self.PrintIdented(file, ' ', entry.CodeBase()) +- print >>file, '};\n' ++ print('};\n', file=file) + + # Creation +- print >>file, ( ++ print(( + 'struct %(name)s *\n' + '%(name)s_new(void)\n' + '{\n' +@@ -137,75 +135,75 @@ int evtag_unmarshal_%(name)s(struct evbuffer *, ev_uint32_t, + ' event_warn("%%s: malloc", __func__);\n' + ' return (NULL);\n' + ' }\n' +- ' tmp->base = &__%(name)s_base;\n') % { 'name' : self._name } ++ ' tmp->base = &__%(name)s_base;\n') % { 'name' : self._name }, file=file) + + for entry in self._entries: + self.PrintIdented(file, ' ', entry.CodeNew('tmp')) +- print >>file, ' tmp->%s_set = 0;\n' % entry.Name() ++ print(' tmp->%s_set = 0;\n' % entry.Name(), file=file) + +- print >>file, ( ++ print(( + ' return (tmp);\n' +- '}\n') ++ '}\n'), file=file) + + # Adding + for entry in 
self._entries: + if entry.Array(): + self.PrintIdented(file, '', entry.CodeAdd()) +- print >>file, '' ++ print('', file=file) + + # Assigning + for entry in self._entries: + self.PrintIdented(file, '', entry.CodeAssign()) +- print >>file, '' ++ print('', file=file) + + # Getting + for entry in self._entries: + self.PrintIdented(file, '', entry.CodeGet()) +- print >>file, '' ++ print('', file=file) + + # Clearing +- print >>file, ( 'void\n' ++ print(( 'void\n' + '%(name)s_clear(struct %(name)s *tmp)\n' + '{' +- ) % { 'name' : self._name } ++ ) % { 'name' : self._name }, file=file) + for entry in self._entries: + self.PrintIdented(file, ' ', entry.CodeClear('tmp')) + +- print >>file, '}\n' ++ print('}\n', file=file) + + # Freeing +- print >>file, ( 'void\n' ++ print(( 'void\n' + '%(name)s_free(struct %(name)s *tmp)\n' + '{' +- ) % { 'name' : self._name } ++ ) % { 'name' : self._name }, file=file) + + for entry in self._entries: + self.PrintIdented(file, ' ', entry.CodeFree('tmp')) + +- print >>file, (' free(tmp);\n' +- '}\n') ++ print((' free(tmp);\n' ++ '}\n'), file=file) + + # Marshaling +- print >>file, ('void\n' ++ print(('void\n' + '%(name)s_marshal(struct evbuffer *evbuf, ' + 'const struct %(name)s *tmp)' +- '{') % { 'name' : self._name } ++ '{') % { 'name' : self._name }, file=file) + for entry in self._entries: + indent = ' ' + # Optional entries do not have to be set + if entry.Optional(): + indent += ' ' +- print >>file, ' if (tmp->%s_set) {' % entry.Name() ++ print(' if (tmp->%s_set) {' % entry.Name(), file=file) + self.PrintIdented( + file, indent, + entry.CodeMarshal('evbuf', self.EntryTagName(entry), 'tmp')) + if entry.Optional(): +- print >>file, ' }' ++ print(' }', file=file) + +- print >>file, '}\n' ++ print('}\n', file=file) + + # Unmarshaling +- print >>file, ('int\n' ++ print(('int\n' + '%(name)s_unmarshal(struct %(name)s *tmp, ' + ' struct evbuffer *evbuf)\n' + '{\n' +@@ -214,50 +212,50 @@ int evtag_unmarshal_%(name)s(struct evbuffer *, 
ev_uint32_t, + ' if (evtag_peek(evbuf, &tag) == -1)\n' + ' return (-1);\n' + ' switch (tag) {\n' +- ) % { 'name' : self._name } ++ ) % { 'name' : self._name }, file=file) + for entry in self._entries: +- print >>file, ' case %s:\n' % self.EntryTagName(entry) ++ print(' case %s:\n' % self.EntryTagName(entry), file=file) + if not entry.Array(): +- print >>file, ( ++ print(( + ' if (tmp->%s_set)\n' + ' return (-1);' +- ) % (entry.Name()) ++ ) % (entry.Name()), file=file) + + self.PrintIdented( + file, ' ', + entry.CodeUnmarshal('evbuf', + self.EntryTagName(entry), 'tmp')) + +- print >>file, ( ' tmp->%s_set = 1;\n' % entry.Name() + +- ' break;\n' ) +- print >>file, ( ' default:\n' ++ print(( ' tmp->%s_set = 1;\n' % entry.Name() + ++ ' break;\n' ), file=file) ++ print(( ' default:\n' + ' return -1;\n' + ' }\n' +- ' }\n' ) ++ ' }\n' ), file=file) + # Check if it was decoded completely +- print >>file, ( ' if (%(name)s_complete(tmp) == -1)\n' ++ print(( ' if (%(name)s_complete(tmp) == -1)\n' + ' return (-1);' +- ) % { 'name' : self._name } ++ ) % { 'name' : self._name }, file=file) + + # Successfully decoded +- print >>file, ( ' return (0);\n' +- '}\n') ++ print(( ' return (0);\n' ++ '}\n'), file=file) + + # Checking if a structure has all the required data +- print >>file, ( ++ print(( + 'int\n' + '%(name)s_complete(struct %(name)s *msg)\n' +- '{' ) % { 'name' : self._name } ++ '{' ) % { 'name' : self._name }, file=file) + for entry in self._entries: + self.PrintIdented( + file, ' ', + entry.CodeComplete('msg')) +- print >>file, ( ++ print(( + ' return (0);\n' +- '}\n' ) ++ '}\n' ), file=file) + + # Complete message unmarshaling +- print >>file, ( ++ print(( + 'int\n' + 'evtag_unmarshal_%(name)s(struct evbuffer *evbuf, ' + 'ev_uint32_t need_tag, struct %(name)s *msg)\n' +@@ -279,10 +277,10 @@ int evtag_unmarshal_%(name)s(struct evbuffer *, ev_uint32_t, + ' error:\n' + ' evbuffer_free(tmp);\n' + ' return (res);\n' +- '}\n' ) % { 'name' : self._name } ++ '}\n' ) % { 'name' 
: self._name }, file=file) + + # Complete message marshaling +- print >>file, ( ++ print(( + 'void\n' + 'evtag_marshal_%(name)s(struct evbuffer *evbuf, ev_uint32_t tag, ' + 'const struct %(name)s *msg)\n' +@@ -294,7 +292,7 @@ int evtag_unmarshal_%(name)s(struct evbuffer *, ev_uint32_t, + ' evtag_marshal(evbuf, tag, EVBUFFER_DATA(_buf), ' + 'EVBUFFER_LENGTH(_buf));\n' + ' evbuffer_free(_buf);\n' +- '}\n' ) % { 'name' : self._name } ++ '}\n' ) % { 'name' : self._name }, file=file) + + class Entry: + def __init__(self, type, name, tag): +@@ -425,19 +423,19 @@ class Entry: + + def Verify(self): + if self.Array() and not self._can_be_array: +- print >>sys.stderr, ( ++ print(( + 'Entry "%s" cannot be created as an array ' +- 'around line %d' ) % (self._name, self.LineCount()) ++ 'around line %d' ) % (self._name, self.LineCount()), file=sys.stderr) + sys.exit(1) + if not self._struct: +- print >>sys.stderr, ( ++ print(( + 'Entry "%s" does not know which struct it belongs to ' +- 'around line %d' ) % (self._name, self.LineCount()) ++ 'around line %d' ) % (self._name, self.LineCount()), file=sys.stderr) + sys.exit(1) + if self._optional and self._array: +- print >>sys.stderr, ( 'Entry "%s" has illegal combination of ' ++ print(( 'Entry "%s" has illegal combination of ' + 'optional and array around line %d' ) % ( +- self._name, self.LineCount() ) ++ self._name, self.LineCount() ), file=sys.stderr) + sys.exit(1) + + class EntryBytes(Entry): +@@ -522,8 +520,8 @@ class EntryBytes(Entry): + + def Verify(self): + if not self._length: +- print >>sys.stderr, 'Entry "%s" needs a length around line %d' % ( +- self._name, self.LineCount() ) ++ print('Entry "%s" needs a length around line %d' % ( ++ self._name, self.LineCount() ), file=sys.stderr) + sys.exit(1) + + Entry.Verify(self) +@@ -1089,8 +1087,8 @@ def ProcessOneEntry(newstruct, entry): + if not name: + res = re.match(r'^([^\[\]]+)(\[.*\])?$', token) + if not res: +- print >>sys.stderr, 'Cannot parse name: \"%s\" around %d' % ( 
+- entry, line_count) ++ print('Cannot parse name: \"%s\" around %d' % ( ++ entry, line_count), file=sys.stderr) + sys.exit(1) + name = res.group(1) + fixed_length = res.group(2) +@@ -1101,24 +1099,24 @@ def ProcessOneEntry(newstruct, entry): + if not separator: + separator = token + if separator != '=': +- print >>sys.stderr, 'Expected "=" after name \"%s\" got %s' % ( +- name, token) ++ print('Expected "=" after name \"%s\" got %s' % ( ++ name, token), file=sys.stderr) + sys.exit(1) + continue + + if not tag_set: + tag_set = 1 + if not re.match(r'^(0x)?[0-9]+$', token): +- print >>sys.stderr, 'Expected tag number: \"%s\"' % entry ++ print('Expected tag number: \"%s\"' % entry, file=sys.stderr) + sys.exit(1) + tag = int(token, 0) + continue + +- print >>sys.stderr, 'Cannot parse \"%s\"' % entry ++ print('Cannot parse \"%s\"' % entry, file=sys.stderr) + sys.exit(1) + + if not tag_set: +- print >>sys.stderr, 'Need tag number: \"%s\"' % entry ++ print('Need tag number: \"%s\"' % entry, file=sys.stderr) + sys.exit(1) + + # Create the right entry +@@ -1138,7 +1136,7 @@ def ProcessOneEntry(newstruct, entry): + # References another struct defined in our file + newentry = EntryStruct(entry_type, name, tag, res.group(1)) + else: +- print >>sys.stderr, 'Bad type: "%s" in "%s"' % (entry_type, entry) ++ print('Bad type: "%s" in "%s"' % (entry_type, entry), file=sys.stderr) + sys.exit(1) + + structs = [] +@@ -1240,8 +1238,8 @@ def GetNextStruct(file): + + if not re.match(r'^struct %s {$' % _STRUCT_RE, + line, re.IGNORECASE): +- print >>sys.stderr, 'Missing struct on line %d: %s' % ( +- line_count, line) ++ print('Missing struct on line %d: %s' % ( ++ line_count, line), file=sys.stderr) + sys.exit(1) + else: + got_struct = 1 +@@ -1255,8 +1253,8 @@ def GetNextStruct(file): + continue + + if len(tokens[1]): +- print >>sys.stderr, 'Trailing garbage after struct on line %d' % ( +- line_count ) ++ print('Trailing garbage after struct on line %d' % ( ++ line_count ), file=sys.stderr) 
+ sys.exit(1) + + # We found the end of the struct +@@ -1377,17 +1375,17 @@ def BodyPreamble(name): + + def main(argv): + if len(argv) < 2 or not argv[1]: +- print >>sys.stderr, 'Need RPC description file as first argument.' ++ print('Need RPC description file as first argument.', file=sys.stderr) + sys.exit(1) + + filename = argv[1] + + ext = filename.split('.')[-1] + if ext != 'rpc': +- print >>sys.stderr, 'Unrecognized file extension: %s' % ext ++ print('Unrecognized file extension: %s' % ext, file=sys.stderr) + sys.exit(1) + +- print >>sys.stderr, 'Reading \"%s\"' % filename ++ print('Reading \"%s\"' % filename, file=sys.stderr) + + fp = open(filename, 'r') + entities = Parse(fp) +@@ -1396,25 +1394,25 @@ def main(argv): + header_file = '.'.join(filename.split('.')[:-1]) + '.gen.h' + impl_file = '.'.join(filename.split('.')[:-1]) + '.gen.c' + +- print >>sys.stderr, '... creating "%s"' % header_file ++ print('... creating "%s"' % header_file, file=sys.stderr) + header_fp = open(header_file, 'w') +- print >>header_fp, HeaderPreamble(filename) ++ print(HeaderPreamble(filename), file=header_fp) + + # Create forward declarations: allows other structs to reference + # each other + for entry in entities: + entry.PrintForwardDeclaration(header_fp) +- print >>header_fp, '' ++ print('', file=header_fp) + + for entry in entities: + entry.PrintTags(header_fp) + entry.PrintDeclaration(header_fp) +- print >>header_fp, HeaderPostamble(filename) ++ print(HeaderPostamble(filename), file=header_fp) + header_fp.close() + +- print >>sys.stderr, '... creating "%s"' % impl_file ++ print('... 
creating "%s"' % impl_file, file=sys.stderr) + impl_fp = open(impl_file, 'w') +- print >>impl_fp, BodyPreamble(filename) ++ print(BodyPreamble(filename), file=impl_fp) + for entry in entities: + entry.PrintCode(impl_fp) + impl_fp.close() +diff --git a/src/3rdparty/chromium/base/win/embedded_i18n/create_string_rc.py b/src/3rdparty/chromium/base/win/embedded_i18n/create_string_rc.py +index 934131c2a..436a08b48 100755 +--- a/src/3rdparty/chromium/base/win/embedded_i18n/create_string_rc.py ++++ b/src/3rdparty/chromium/base/win/embedded_i18n/create_string_rc.py +@@ -58,7 +58,7 @@ Note: MODE_SPECIFIC_STRINGS cannot be specified if STRING_IDS is not specified. + # and IDS_L10N_OFFSET_* for the language we are interested in. + # + +-from __future__ import print_function ++ + + import argparse + import glob +@@ -280,7 +280,7 @@ class StringRcMaker(object): + def __AddModeSpecificStringIds(self): + """Adds the mode-specific strings for all of the current brand's install + modes to self.string_id_set.""" +- for string_id, brands in self.mode_specific_strings.items(): ++ for string_id, brands in list(self.mode_specific_strings.items()): + brand_strings = brands.get(self.brand) + if not brand_strings: + raise RuntimeError( +@@ -358,7 +358,7 @@ Extra input files: + # Manually put the source strings as en-US in the list of translated + # strings. + translated_strings = [] +- for string_id, message_text in source_strings.items(): ++ for string_id, message_text in list(source_strings.items()): + translated_strings.append(self.__TranslationData(string_id, + 'EN_US', + message_text)) +@@ -368,7 +368,7 @@ Extra input files: + # message text; hence the message id is mapped to a list of string ids + # instead of a single value. 
+ translation_ids = {} +- for (string_id, message_text) in source_strings.items(): ++ for (string_id, message_text) in list(source_strings.items()): + message_id = tclib.GenerateMessageId(message_text) + translation_ids.setdefault(message_id, []).append(string_id); + +@@ -383,7 +383,7 @@ Extra input files: + if not xtb_filename in source_xtb_files: + extra_xtb_files.append(xtb_filename) + sax_parser.parse(xtb_filename) +- for string_id, message_text in source_strings.items(): ++ for string_id, message_text in list(source_strings.items()): + translated_string = xtb_handler.translations.get(string_id, + message_text) + translated_strings.append(self.__TranslationData(string_id, +@@ -407,13 +407,13 @@ Extra input files: + """Writes a resource file with the strings provided in |translated_strings|. + """ + HEADER_TEXT = ( +- u'#include "%s"\n\n' +- u'STRINGTABLE\n' +- u'BEGIN\n' ++ '#include "%s"\n\n' ++ 'STRINGTABLE\n' ++ 'BEGIN\n' + ) % os.path.basename(self.header_file) + + FOOTER_TEXT = ( +- u'END\n' ++ 'END\n' + ) + + with io.open(self.rc_file, +@@ -426,7 +426,7 @@ Extra input files: + escaped_text = (translation.translation.replace('"', '""') + .replace('\t', '\\t') + .replace('\n', '\\n')) +- outfile.write(u' %s "%s"\n' % ++ outfile.write(' %s "%s"\n' % + (translation.resource_id_str + '_' + translation.language, + escaped_text)) + outfile.write(FOOTER_TEXT) +@@ -463,7 +463,7 @@ Extra input files: + resource_id += 1 + + # Handle mode-specific strings. +- for string_id, brands in self.mode_specific_strings.items(): ++ for string_id, brands in list(self.mode_specific_strings.items()): + # Populate the DO_MODE_STRINGS macro. 
+ brand_strings = brands.get(self.brand) + if not brand_strings: +@@ -575,7 +575,7 @@ def main(): + parser.error('A brand was specified (' + brand + ') but no mode ' + 'specific strings were given.') + valid_brands = [b for b in +- next(iter(mode_specific_strings.values())).keys()] ++ list(next(iter(list(mode_specific_strings.values()))).keys())] + if not brand in valid_brands: + parser.error('A brand was specified (' + brand + ') but it is not ' + 'a valid brand [' + ', '.join(valid_brands) + '].') +@@ -590,7 +590,7 @@ def main(): + parser.error('Mismatch in number of grd files ({}) and xtb relative ' + 'paths ({})'.format(len(grd_files), len(xtb_relative_paths))) + +- inputs = zip(grd_files, xtb_relative_paths) ++ inputs = list(zip(grd_files, xtb_relative_paths)) + + StringRcMaker(inputs, args.expected_xtb_input_files, args.header_file, + args.rc_file, brand, args.first_resource_id, string_ids_to_extract, +diff --git a/src/3rdparty/chromium/build/android/adb_command_line.py b/src/3rdparty/chromium/build/android/adb_command_line.py +index 5d3e9ce11..596d61af5 100755 +--- a/src/3rdparty/chromium/build/android/adb_command_line.py ++++ b/src/3rdparty/chromium/build/android/adb_command_line.py +@@ -5,7 +5,7 @@ + + """Utility for reading / writing command-line flag files on device(s).""" + +-from __future__ import print_function ++ + + import argparse + import logging +diff --git a/src/3rdparty/chromium/build/android/adb_logcat_monitor.py b/src/3rdparty/chromium/build/android/adb_logcat_monitor.py +index a919722cb..614cfefea 100755 +--- a/src/3rdparty/chromium/build/android/adb_logcat_monitor.py ++++ b/src/3rdparty/chromium/build/android/adb_logcat_monitor.py +@@ -16,7 +16,7 @@ resilient across phone disconnects and reconnects and start the logcat + early enough to not miss anything. 
+ """ + +-from __future__ import print_function ++ + + import logging + import os +@@ -141,7 +141,7 @@ def main(base_dir, adb_cmd='adb'): + except: # pylint: disable=bare-except + logging.exception('Unexpected exception in main.') + finally: +- for process, _ in devices.itervalues(): ++ for process, _ in devices.values(): + if process: + try: + process.terminate() +diff --git a/src/3rdparty/chromium/build/android/adb_logcat_printer.py b/src/3rdparty/chromium/build/android/adb_logcat_printer.py +index a71517075..6aaeb9e78 100755 +--- a/src/3rdparty/chromium/build/android/adb_logcat_printer.py ++++ b/src/3rdparty/chromium/build/android/adb_logcat_printer.py +@@ -19,7 +19,7 @@ monitoring for the deletion of the aforementioned file. + """ + # pylint: disable=W0702 + +-import cStringIO ++import io + import logging + import optparse + import os +@@ -108,7 +108,7 @@ def GetDeviceLogs(log_filenames, logger): + """ + device_logs = [] + +- for device, device_files in log_filenames.iteritems(): ++ for device, device_files in log_filenames.items(): + logger.debug('%s: %s', device, str(device_files)) + device_file_lines = [] + for cur_file in device_files: +@@ -160,7 +160,7 @@ def main(argv): + parser.error('Wrong number of unparsed args') + base_dir = args[0] + +- log_stringio = cStringIO.StringIO() ++ log_stringio = io.StringIO() + logger = logging.getLogger('LogcatPrinter') + logger.setLevel(LOG_LEVEL) + sh = logging.StreamHandler(log_stringio) +diff --git a/src/3rdparty/chromium/build/android/adb_reverse_forwarder.py b/src/3rdparty/chromium/build/android/adb_reverse_forwarder.py +index 3da9c98f7..f8a0c3143 100755 +--- a/src/3rdparty/chromium/build/android/adb_reverse_forwarder.py ++++ b/src/3rdparty/chromium/build/android/adb_reverse_forwarder.py +@@ -62,7 +62,7 @@ def main(argv): + if len(args.ports) < 2 or len(args.ports) % 2: + parser.error('Need even number of port pairs') + +- port_pairs = zip(args.ports[::2], args.ports[1::2]) ++ port_pairs = list(zip(args.ports[::2], 
args.ports[1::2])) + + if args.build_type: + constants.SetBuildType(args.build_type) +diff --git a/src/3rdparty/chromium/build/android/apk_operations.py b/src/3rdparty/chromium/build/android/apk_operations.py +index d2798147a..1e5136824 100755 +--- a/src/3rdparty/chromium/build/android/apk_operations.py ++++ b/src/3rdparty/chromium/build/android/apk_operations.py +@@ -6,7 +6,7 @@ + # Using colorama.Fore/Back/Style members + # pylint: disable=no-member + +-from __future__ import print_function ++ + + import argparse + import collections +@@ -531,8 +531,8 @@ def _RunDiskUsage(devices, package_name): + compilation_filter) + + def print_sizes(desc, sizes): +- print('%s: %d KiB' % (desc, sum(sizes.itervalues()))) +- for path, size in sorted(sizes.iteritems()): ++ print('%s: %d KiB' % (desc, sum(sizes.values()))) ++ for path, size in sorted(sizes.items()): + print(' %s: %s KiB' % (path, size)) + + parallel_devices = device_utils.DeviceUtils.parallel(devices) +@@ -544,7 +544,7 @@ def _RunDiskUsage(devices, package_name): + + (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes, + compilation_filter) = result +- total = sum(sum(sizes.itervalues()) for sizes in result[:-1]) ++ total = sum(sum(sizes.values()) for sizes in result[:-1]) + + print_sizes('Apk', apk_sizes) + print_sizes('App Data (non-code cache)', data_dir_sizes) +diff --git a/src/3rdparty/chromium/build/android/asan_symbolize.py b/src/3rdparty/chromium/build/android/asan_symbolize.py +index 658508987..5f61b1d4e 100755 +--- a/src/3rdparty/chromium/build/android/asan_symbolize.py ++++ b/src/3rdparty/chromium/build/android/asan_symbolize.py +@@ -4,7 +4,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import collections + import optparse +@@ -98,7 +98,7 @@ def _PrintSymbolized(asan_input, arch): + # Maps library -> { address -> [(symbol, location, obj_sym_with_offset)...] 
} + all_symbols = collections.defaultdict(dict) + +- for library, items in libraries.iteritems(): ++ for library, items in libraries.items(): + libname = _TranslateLibPath(library, asan_libs) + lib_relative_addrs = set([i.rel_address for i in items]) + # pylint: disable=no-member +diff --git a/src/3rdparty/chromium/build/android/convert_dex_profile_tests.py b/src/3rdparty/chromium/build/android/convert_dex_profile_tests.py +index 0ddc5ce4a..0beb71314 100644 +--- a/src/3rdparty/chromium/build/android/convert_dex_profile_tests.py ++++ b/src/3rdparty/chromium/build/android/convert_dex_profile_tests.py +@@ -167,14 +167,14 @@ class GenerateProfileTests(unittest.TestCase): + dex = cp.ProcessDex(DEX_DUMP.splitlines()) + self.assertIsNotNone(dex['a']) + +- self.assertEquals(len(dex['a'].FindMethodsAtLine('', 311, 313)), 1) +- self.assertEquals(len(dex['a'].FindMethodsAtLine('', 309, 315)), 1) ++ self.assertEqual(len(dex['a'].FindMethodsAtLine('', 311, 313)), 1) ++ self.assertEqual(len(dex['a'].FindMethodsAtLine('', 309, 315)), 1) + clinit = dex['a'].FindMethodsAtLine('', 311, 313)[0] +- self.assertEquals(clinit.name, '') +- self.assertEquals(clinit.return_type, 'V') +- self.assertEquals(clinit.param_types, 'Ljava/lang/String;') ++ self.assertEqual(clinit.name, '') ++ self.assertEqual(clinit.return_type, 'V') ++ self.assertEqual(clinit.param_types, 'Ljava/lang/String;') + +- self.assertEquals(len(dex['a'].FindMethodsAtLine('a', 8, None)), 2) ++ self.assertEqual(len(dex['a'].FindMethodsAtLine('a', 8, None)), 2) + self.assertIsNone(dex['a'].FindMethodsAtLine('a', 100, None)) + + # pylint: disable=protected-access +@@ -183,7 +183,7 @@ class GenerateProfileTests(unittest.TestCase): + mapping, reverse = cp.ProcessProguardMapping( + PROGUARD_MAPPING.splitlines(), dex) + +- self.assertEquals('La;', reverse.GetClassMapping('Lorg/chromium/Original;')) ++ self.assertEqual('La;', reverse.GetClassMapping('Lorg/chromium/Original;')) + + getInstance = cp.Method( + 'getInstance', 
'Lorg/chromium/Original;', '', 'Lorg/chromium/Original;') +@@ -196,7 +196,7 @@ class GenerateProfileTests(unittest.TestCase): + + mapped = mapping.GetMethodMapping( + cp.Method('a', 'La;', 'Ljava/lang/String;', 'I')) +- self.assertEquals(len(mapped), 2) ++ self.assertEqual(len(mapped), 2) + self.assertIn(getInstance, mapped) + self.assertNotIn(subclassInit, mapped) + self.assertNotIn( +@@ -205,18 +205,18 @@ class GenerateProfileTests(unittest.TestCase): + + mapped = mapping.GetMethodMapping( + cp.Method('a', 'La;', 'Ljava/lang/Object;', 'I')) +- self.assertEquals(len(mapped), 1) ++ self.assertEqual(len(mapped), 1) + self.assertIn(getInstance, mapped) + + mapped = mapping.GetMethodMapping(cp.Method('b', 'La;', '', 'La;')) +- self.assertEquals(len(mapped), 1) ++ self.assertEqual(len(mapped), 1) + self.assertIn(another, mapped) + +- for from_method, to_methods in mapping._method_mapping.iteritems(): ++ for from_method, to_methods in mapping._method_mapping.items(): + for to_method in to_methods: + self.assertIn(from_method, reverse.GetMethodMapping(to_method)) +- for from_class, to_class in mapping._class_mapping.iteritems(): +- self.assertEquals(from_class, reverse.GetClassMapping(to_class)) ++ for from_class, to_class in mapping._class_mapping.items(): ++ self.assertEqual(from_class, reverse.GetClassMapping(to_class)) + + def testProcessProfile(self): + dex = cp.ProcessDex(DEX_DUMP.splitlines()) +@@ -234,9 +234,9 @@ class GenerateProfileTests(unittest.TestCase): + self.assertIn(initialize, profile._methods) + self.assertIn(another, profile._methods) + +- self.assertEquals(profile._methods[getInstance], set(['H', 'S', 'P'])) +- self.assertEquals(profile._methods[initialize], set(['H', 'P'])) +- self.assertEquals(profile._methods[another], set(['P'])) ++ self.assertEqual(profile._methods[getInstance], set(['H', 'S', 'P'])) ++ self.assertEqual(profile._methods[initialize], set(['H', 'P'])) ++ self.assertEqual(profile._methods[another], set(['P'])) + + def 
testEndToEnd(self): + dex = cp.ProcessDex(DEX_DUMP.splitlines()) +@@ -247,7 +247,7 @@ class GenerateProfileTests(unittest.TestCase): + profile.WriteToFile(temp.name) + with open(temp.name, 'r') as f: + for a, b in zip(sorted(f), sorted(UNOBFUSCATED_PROFILE.splitlines())): +- self.assertEquals(a.strip(), b.strip()) ++ self.assertEqual(a.strip(), b.strip()) + + def testObfuscateProfile(self): + with build_utils.TempDir() as temp_dir: +@@ -269,7 +269,7 @@ class GenerateProfileTests(unittest.TestCase): + obfuscated_profile = sorted(obfuscated_file.readlines()) + for a, b in zip( + sorted(OBFUSCATED_PROFILE_2.splitlines()), obfuscated_profile): +- self.assertEquals(a.strip(), b.strip()) ++ self.assertEqual(a.strip(), b.strip()) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/build/android/devil_chromium.py b/src/3rdparty/chromium/build/android/devil_chromium.py +index 1cd5a8715..742452596 100644 +--- a/src/3rdparty/chromium/build/android/devil_chromium.py ++++ b/src/3rdparty/chromium/build/android/devil_chromium.py +@@ -152,7 +152,7 @@ def Initialize(output_directory=None, custom_deps=None, adb_path=None): + for dep_config in dep_configs + } + } +- for dep_name, dep_configs in _DEVIL_BUILD_PRODUCT_DEPS.iteritems() ++ for dep_name, dep_configs in _DEVIL_BUILD_PRODUCT_DEPS.items() + } + if custom_deps: + devil_dynamic_config['dependencies'].update(custom_deps) +diff --git a/src/3rdparty/chromium/build/android/diff_resource_sizes.py b/src/3rdparty/chromium/build/android/diff_resource_sizes.py +index eefb6cdb2..2dd1c209b 100755 +--- a/src/3rdparty/chromium/build/android/diff_resource_sizes.py ++++ b/src/3rdparty/chromium/build/android/diff_resource_sizes.py +@@ -5,7 +5,7 @@ + + """Runs resource_sizes.py on two apks and outputs the diff.""" + +-from __future__ import print_function ++ + + import argparse + import json +@@ -49,8 +49,8 @@ def DiffResults(chartjson, base_results, diff_results): + base_results: The chartjson-formatted size results of the 
base APK. + diff_results: The chartjson-formatted size results of the diff APK. + """ +- for graph_title, graph in base_results['charts'].iteritems(): +- for trace_title, trace in graph.iteritems(): ++ for graph_title, graph in base_results['charts'].items(): ++ for trace_title, trace in graph.items(): + perf_tests_results_helper.ReportPerfResult( + chartjson, graph_title, trace_title, + diff_results['charts'][graph_title][trace_title]['value'] +@@ -67,8 +67,8 @@ def AddIntermediateResults(chartjson, base_results, diff_results): + base_results: The chartjson-formatted size results of the base APK. + diff_results: The chartjson-formatted size results of the diff APK. + """ +- for graph_title, graph in base_results['charts'].iteritems(): +- for trace_title, trace in graph.iteritems(): ++ for graph_title, graph in base_results['charts'].items(): ++ for trace_title, trace in graph.items(): + perf_tests_results_helper.ReportPerfResult( + chartjson, graph_title + '_base_apk', trace_title, + trace['value'], trace['units'], trace['improvement_direction'], +@@ -76,8 +76,8 @@ def AddIntermediateResults(chartjson, base_results, diff_results): + + # Both base_results and diff_results should have the same charts/traces, but + # loop over them separately in case they don't +- for graph_title, graph in diff_results['charts'].iteritems(): +- for trace_title, trace in graph.iteritems(): ++ for graph_title, graph in diff_results['charts'].items(): ++ for trace_title, trace in graph.items(): + perf_tests_results_helper.ReportPerfResult( + chartjson, graph_title + '_diff_apk', trace_title, + trace['value'], trace['units'], trace['improvement_direction'], +diff --git a/src/3rdparty/chromium/build/android/dump_apk_resource_strings.py b/src/3rdparty/chromium/build/android/dump_apk_resource_strings.py +index b57db5032..fe7129e7e 100755 +--- a/src/3rdparty/chromium/build/android/dump_apk_resource_strings.py ++++ b/src/3rdparty/chromium/build/android/dump_apk_resource_strings.py +@@ -6,7 
+6,7 @@ + + """A script to parse and dump localized strings in resource.arsc files.""" + +-from __future__ import print_function ++ + + import argparse + import collections +@@ -220,7 +220,7 @@ class ResourceStringValues(object): + def ToStringList(self, res_id): + """Convert entry to string list for human-friendly output.""" + values = sorted( +- [(str(config), value) for config, value in self.res_values.iteritems()]) ++ [(str(config), value) for config, value in self.res_values.items()]) + if res_id is None: + # res_id will be None when the resource ID should not be part + # of the output. +@@ -256,7 +256,7 @@ class ResourceStringMap(object): + + def RemapResourceNames(self, id_name_map): + """Rename all entries according to a given {res_id -> res_name} map.""" +- for res_id, res_name in id_name_map.iteritems(): ++ for res_id, res_name in id_name_map.items(): + if res_id in self._res_map: + self._res_map[res_id].res_name = res_name + +@@ -286,7 +286,7 @@ class ResourceStringMap(object): + result = cmp(a[0], b[0]) + return result + +- for res_id, _ in sorted(res_map.iteritems(), cmp=cmp_id_name): ++ for res_id, _ in sorted(iter(res_map.items()), cmp=cmp_id_name): + result += res_map[res_id].ToStringList(None if omit_ids else res_id) + result.append('} # Resource strings') + return result +@@ -386,7 +386,7 @@ assert _RE_BUNDLE_STRING_DEFAULT_VALUE.match( + _RE_BUNDLE_STRING_LOCALIZED_VALUE = re.compile( + r'^\s+locale: "([0-9a-zA-Z-]+)" - \[STR\] "(.*)"$') + assert _RE_BUNDLE_STRING_LOCALIZED_VALUE.match( +- u' locale: "ar" - [STR] "گزینه\u200cهای بیشتر"'.encode('utf-8')) ++ ' locale: "ar" - [STR] "گزینه\u200cهای بیشتر"'.encode('utf-8')) + + + def ParseBundleResources(bundle_tool_jar_path, bundle_path): +diff --git a/src/3rdparty/chromium/build/android/emma_coverage_stats.py b/src/3rdparty/chromium/build/android/emma_coverage_stats.py +index fe1775a8a..f79e26592 100755 +--- a/src/3rdparty/chromium/build/android/emma_coverage_stats.py ++++ 
b/src/3rdparty/chromium/build/android/emma_coverage_stats.py +@@ -183,7 +183,7 @@ class _EmmaHtmlParser(object): + } + + package_to_emma = {} +- for package_emma_file_path, package_name in package_links.iteritems(): ++ for package_emma_file_path, package_name in package_links.items(): + # These elements contain each class name in the current package and + # the path of the file where the coverage info is stored for each class. + coverage_file_link_elements = self._FindElements( +@@ -257,7 +257,7 @@ class _EmmaCoverageStats(object): + |lines_for_coverage|. + """ + file_coverage = {} +- for file_path, line_numbers in lines_for_coverage.iteritems(): ++ for file_path, line_numbers in lines_for_coverage.items(): + file_coverage_dict = self.GetCoverageDictForFile(file_path, line_numbers) + if file_coverage_dict: + file_coverage[file_path] = file_coverage_dict +@@ -265,7 +265,7 @@ class _EmmaCoverageStats(object): + logging.warning( + 'No code coverage data for %s, skipping.', file_path) + +- covered_statuses = [s['incremental'] for s in file_coverage.itervalues()] ++ covered_statuses = [s['incremental'] for s in file_coverage.values()] + num_covered_lines = sum(s['covered'] for s in covered_statuses) + num_total_lines = sum(s['total'] for s in covered_statuses) + return { +@@ -382,7 +382,7 @@ class _EmmaCoverageStats(object): + # Finally, we have a dict mapping Java file paths to EMMA report files. + # Example: /usr/code/file.java -> out/coverage/1a.html. 
+ source_to_emma = {source: package_to_emma[package] +- for source, package in source_to_package.iteritems() ++ for source, package in source_to_package.items() + if package in package_to_emma} + return source_to_emma + +@@ -442,12 +442,12 @@ def GenerateCoverageReport(line_coverage_file, out_file_path, coverage_dir): + potential_files_for_coverage = json.load(f) + + files_for_coverage = {f: lines +- for f, lines in potential_files_for_coverage.iteritems() ++ for f, lines in potential_files_for_coverage.items() + if _EmmaCoverageStats.NeedsCoverage(f)} + + coverage_results = {} + if files_for_coverage: +- code_coverage = _EmmaCoverageStats(coverage_dir, files_for_coverage.keys()) ++ code_coverage = _EmmaCoverageStats(coverage_dir, list(files_for_coverage.keys())) + coverage_results = code_coverage.GetCoverageDict(files_for_coverage) + else: + logging.info('No Java files requiring coverage were included in %s.', +diff --git a/src/3rdparty/chromium/build/android/emma_coverage_stats_test.py b/src/3rdparty/chromium/build/android/emma_coverage_stats_test.py +index d67f6be21..6a3db26d8 100755 +--- a/src/3rdparty/chromium/build/android/emma_coverage_stats_test.py ++++ b/src/3rdparty/chromium/build/android/emma_coverage_stats_test.py +@@ -182,7 +182,7 @@ class _EmmaHtmlParserTest(unittest.TestCase): + read_values = ['
' + multiple_trs + '
'] + found, _ = MockOpenForFunction(self.parser._FindElements, read_values, + file_path='fake', xpath_selector='.//TR') +- self.assertEquals(2, len(found)) ++ self.assertEqual(2, len(found)) + + def testFindElements_noMatch(self): + read_values = [self.simple_html] +@@ -377,7 +377,7 @@ class _EmmaCoverageStatsTest(unittest.TestCase): + return_value=package_to_emma) + coverage_stats.GetPackageNameFromFile = lambda x: package_names[x] + result_dict = coverage_stats._GetSourceFileToEmmaFileDict( +- package_names.keys()) ++ list(package_names.keys())) + self.assertDictEqual(result_dict, self.good_source_to_emma) + + def testGetCoverageDictForFile(self): +diff --git a/src/3rdparty/chromium/build/android/generate_jacoco_report.py b/src/3rdparty/chromium/build/android/generate_jacoco_report.py +index f325ee775..33a256513 100755 +--- a/src/3rdparty/chromium/build/android/generate_jacoco_report.py ++++ b/src/3rdparty/chromium/build/android/generate_jacoco_report.py +@@ -6,7 +6,7 @@ + + """Aggregates Jacoco coverage files to produce output.""" + +-from __future__ import print_function ++ + + import argparse + import fnmatch +diff --git a/src/3rdparty/chromium/build/android/gradle/generate_gradle.py b/src/3rdparty/chromium/build/android/gradle/generate_gradle.py +index bbbde51da..eecf8d268 100755 +--- a/src/3rdparty/chromium/build/android/gradle/generate_gradle.py ++++ b/src/3rdparty/chromium/build/android/gradle/generate_gradle.py +@@ -83,7 +83,7 @@ def _RebasePath(path_or_list, new_cwd=None, old_cwd=None): + """ + if path_or_list is None: + return [] +- if not isinstance(path_or_list, basestring): ++ if not isinstance(path_or_list, str): + return [_RebasePath(p, new_cwd, old_cwd) for p in path_or_list] + if old_cwd is None: + old_cwd = constants.GetOutDirectory() +@@ -435,10 +435,10 @@ def _ComputeJavaSourceDirsAndExcludes(output_dir, java_files): + if java_files: + java_files = _RebasePath(java_files) + computed_dirs = _ComputeJavaSourceDirs(java_files) +- java_dirs = 
computed_dirs.keys() ++ java_dirs = list(computed_dirs.keys()) + all_found_java_files = set() + +- for directory, files in computed_dirs.iteritems(): ++ for directory, files in computed_dirs.items(): + found_java_files = build_utils.FindInDirectory(directory, '*.java') + all_found_java_files.update(found_java_files) + unwanted_java_files = set(found_java_files) - set(files) +@@ -570,7 +570,7 @@ def _GenerateGradleFile(entry, generator, build_vars, jinja_processor): + test_entry = generator.Generate(e) + test_entry['android_manifest'] = generator.GenerateManifest(e) + variables['android_test'].append(test_entry) +- for key, value in test_entry.iteritems(): ++ for key, value in test_entry.items(): + if isinstance(value, list): + test_entry[key] = sorted(set(value) - set(variables['main'][key])) + +@@ -725,7 +725,7 @@ def _CombineTestEntries(entries): + entry.android_test_entries = android_test_entries[target_name] + del android_test_entries[target_name] + # Add unmatched test entries as individual targets. +- combined_entries.extend(e for l in android_test_entries.values() for e in l) ++ combined_entries.extend(e for l in list(android_test_entries.values()) for e in l) + return combined_entries + + +diff --git a/src/3rdparty/chromium/build/android/gradle/gn_to_cmake.py b/src/3rdparty/chromium/build/android/gradle/gn_to_cmake.py +index d3e80ae76..658c25d56 100755 +--- a/src/3rdparty/chromium/build/android/gradle/gn_to_cmake.py ++++ b/src/3rdparty/chromium/build/android/gradle/gn_to_cmake.py +@@ -16,7 +16,7 @@ python gn/gn_to_cmake.py out/config/project.json + The first is recommended, as it will auto-update. 
+ """ + +-from __future__ import print_function ++ + + import functools + import json +@@ -267,7 +267,7 @@ def WriteAction(out, target, project, sources, synthetic_dependencies): + out.write('\n') + + out.write(' DEPENDS ') +- for sources_type_name in sources.values(): ++ for sources_type_name in list(sources.values()): + WriteVariable(out, sources_type_name, ' ') + out.write('\n') + +@@ -384,7 +384,7 @@ def WriteCopy(out, target, project, sources, synthetic_dependencies): + out.write('"\n') + + out.write(' DEPENDS ') +- for sources_type_name in sources.values(): ++ for sources_type_name in list(sources.values()): + WriteVariable(out, sources_type_name, ' ') + out.write('\n') + +@@ -502,7 +502,7 @@ def WriteSourceVariables(out, target, project): + source_types['obj_target'].append(obj_target_sources) + + sources = {} +- for source_type, sources_of_type in source_types.items(): ++ for source_type, sources_of_type in list(source_types.items()): + if sources_of_type: + sources[source_type] = '${target}__' + source_type + '_srcs' + SetVariableList(out, sources[source_type], sources_of_type) +@@ -536,7 +536,7 @@ def WriteTarget(out, target, project): + if target.cmake_type.modifier is not None: + out.write(' ') + out.write(target.cmake_type.modifier) +- for sources_type_name in sources.values(): ++ for sources_type_name in list(sources.values()): + WriteVariable(out, sources_type_name, ' ') + if synthetic_dependencies: + out.write(' DEPENDS') +@@ -667,7 +667,7 @@ def WriteProject(project): + out.write(' configure_file(${gn_dep} "CMakeLists.devnull" COPYONLY)\n') + out.write('endforeach("gn_dep")\n') + +- for target_name in project.targets.keys(): ++ for target_name in list(project.targets.keys()): + out.write('\n') + WriteTarget(out, Target(target_name, project), project) + +diff --git a/src/3rdparty/chromium/build/android/gyp/allot_native_libraries.py b/src/3rdparty/chromium/build/android/gyp/allot_native_libraries.py +index 585f19d51..3a506e28e 100755 +--- 
a/src/3rdparty/chromium/build/android/gyp/allot_native_libraries.py ++++ b/src/3rdparty/chromium/build/android/gyp/allot_native_libraries.py +@@ -114,7 +114,7 @@ def _AllotLibraries(module_tree, libraries_map): + Exception if some libraries can only be allotted to the None root. + """ + allotment_map = collections.defaultdict(set) +- for library, modules in libraries_map.items(): ++ for library, modules in list(libraries_map.items()): + ancestor = _ClosestCommonAncestor(module_tree, modules) + if not ancestor: + raise Exception('Cannot allot libraries for given dependency tree') +@@ -175,7 +175,7 @@ def main(args): + with open(options.output, 'w') as f: + # Write native libraries config and ensure the output is deterministic. + json.dump({m: sorted(l) +- for m, l in allotment_map.items()}, ++ for m, l in list(allotment_map.items())}, + f, + sort_keys=True, + indent=2) +diff --git a/src/3rdparty/chromium/build/android/gyp/assert_static_initializers.py b/src/3rdparty/chromium/build/android/gyp/assert_static_initializers.py +index 3a2e7e3f5..3d3ec2210 100755 +--- a/src/3rdparty/chromium/build/android/gyp/assert_static_initializers.py ++++ b/src/3rdparty/chromium/build/android/gyp/assert_static_initializers.py +@@ -5,7 +5,7 @@ + + """Checks the number of static initializers in an APK's library.""" + +-from __future__ import print_function ++ + + import argparse + import os +diff --git a/src/3rdparty/chromium/build/android/gyp/compile_java.py b/src/3rdparty/chromium/build/android/gyp/compile_java.py +index 3c1400f69..0596cf287 100755 +--- a/src/3rdparty/chromium/build/android/gyp/compile_java.py ++++ b/src/3rdparty/chromium/build/android/gyp/compile_java.py +@@ -233,7 +233,7 @@ def ProcessJavacOutput(output): + line = Colorize(line, marker_re, marker_color) + return line + +- return '\n'.join(map(ApplyColors, filter(ApplyFilters, output.split('\n')))) ++ return '\n'.join(map(ApplyColors, list(filter(ApplyFilters, output.split('\n'))))) + + + def 
CheckErrorproneStderrWarning(jar_path, expected_warning_regex, +diff --git a/src/3rdparty/chromium/build/android/gyp/copy_ex.py b/src/3rdparty/chromium/build/android/gyp/copy_ex.py +index f93597f97..40d81ba3b 100755 +--- a/src/3rdparty/chromium/build/android/gyp/copy_ex.py ++++ b/src/3rdparty/chromium/build/android/gyp/copy_ex.py +@@ -6,7 +6,7 @@ + + """Copies files to a directory.""" + +-from __future__ import print_function ++ + + import filecmp + import itertools +@@ -74,7 +74,7 @@ def DoRenaming(options, deps): + print('Renaming source and destination files not match.') + sys.exit(-1) + +- for src, dest in itertools.izip(src_files, dest_files): ++ for src, dest in zip(src_files, dest_files): + if os.path.isdir(src): + print('renaming diretory is not supported.') + sys.exit(-1) +diff --git a/src/3rdparty/chromium/build/android/gyp/create_apk_operations_script.py b/src/3rdparty/chromium/build/android/gyp/create_apk_operations_script.py +index a39752bcf..c5306dc55 100755 +--- a/src/3rdparty/chromium/build/android/gyp/create_apk_operations_script.py ++++ b/src/3rdparty/chromium/build/android/gyp/create_apk_operations_script.py +@@ -87,7 +87,7 @@ def main(args): + 'TARGET_CPU': repr(args.target_cpu), + } + script.write(SCRIPT_TEMPLATE.substitute(script_dict)) +- os.chmod(args.script_output_path, 0750) ++ os.chmod(args.script_output_path, 0o750) + return 0 + + +diff --git a/src/3rdparty/chromium/build/android/gyp/create_app_bundle.py b/src/3rdparty/chromium/build/android/gyp/create_app_bundle.py +index ce05668e0..7e01a99a7 100755 +--- a/src/3rdparty/chromium/build/android/gyp/create_app_bundle.py ++++ b/src/3rdparty/chromium/build/android/gyp/create_app_bundle.py +@@ -331,7 +331,7 @@ def _GenerateBaseResourcesAllowList(base_module_rtxt_path, + """ + ids_map = resource_utils.GenerateStringResourcesAllowList( + base_module_rtxt_path, base_allowlist_rtxt_path) +- return ids_map.keys() ++ return list(ids_map.keys()) + + + def _ConcatTextFiles(in_paths, out_path): +@@ 
-386,7 +386,7 @@ def _WriteBundlePathmap(module_pathmap_paths, module_names, + if not os.path.exists(module_pathmap_path): + continue + module_pathmap = _LoadPathmap(module_pathmap_path) +- for short_path, long_path in module_pathmap.iteritems(): ++ for short_path, long_path in module_pathmap.items(): + rebased_long_path = '{}/{}'.format(module_name, long_path) + rebased_short_path = '{}/{}'.format(module_name, short_path) + line = '{} -> {}\n'.format(rebased_long_path, rebased_short_path) +@@ -432,11 +432,11 @@ def _MaybeCheckServicesPresentInBase(bundle_path, module_zips): + classes = set() + base_package_name = manifest_utils.GetPackage(base_manifest) + for package in dexdump.Dump(base_zip): +- for name, package_dict in package.items(): ++ for name, package_dict in list(package.items()): + if not name: + name = base_package_name + classes.update('%s.%s' % (name, c) +- for c in package_dict['classes'].keys()) ++ for c in list(package_dict['classes'].keys())) + + # Ensure all services are present in base module. 
+ for service_name in service_names: +diff --git a/src/3rdparty/chromium/build/android/gyp/create_bundle_wrapper_script.py b/src/3rdparty/chromium/build/android/gyp/create_bundle_wrapper_script.py +index 5f576acf4..3aa0d5d87 100755 +--- a/src/3rdparty/chromium/build/android/gyp/create_bundle_wrapper_script.py ++++ b/src/3rdparty/chromium/build/android/gyp/create_bundle_wrapper_script.py +@@ -119,7 +119,7 @@ def main(args): + repr(args.default_modules), + } + script.write(SCRIPT_TEMPLATE.substitute(script_dict)) +- os.chmod(args.script_output_path, 0750) ++ os.chmod(args.script_output_path, 0o750) + return 0 + + +diff --git a/src/3rdparty/chromium/build/android/gyp/create_java_binary_script.py b/src/3rdparty/chromium/build/android/gyp/create_java_binary_script.py +index f24bcde02..64a6503d6 100755 +--- a/src/3rdparty/chromium/build/android/gyp/create_java_binary_script.py ++++ b/src/3rdparty/chromium/build/android/gyp/create_java_binary_script.py +@@ -103,7 +103,7 @@ def main(argv): + extra_program_args=repr(extra_program_args), + noverify_flag=noverify_flag)) + +- os.chmod(options.output, 0750) ++ os.chmod(options.output, 0o750) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/build/android/gyp/create_r_java.py b/src/3rdparty/chromium/build/android/gyp/create_r_java.py +index 6c55a06fd..2d5ffae04 100755 +--- a/src/3rdparty/chromium/build/android/gyp/create_r_java.py ++++ b/src/3rdparty/chromium/build/android/gyp/create_r_java.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python2 ++#!/usr/bin/env python3 + # Copyright 2020 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+diff --git a/src/3rdparty/chromium/build/android/gyp/dex.py b/src/3rdparty/chromium/build/android/gyp/dex.py +index 905c44d9d..37e87c605 100755 +--- a/src/3rdparty/chromium/build/android/gyp/dex.py ++++ b/src/3rdparty/chromium/build/android/gyp/dex.py +@@ -290,7 +290,7 @@ def _ZipMultidex(file_dir, dex_files): + if not ordered_files: + raise Exception('Could not find classes.dex multidex file in %s', + dex_files) +- for dex_idx in xrange(2, len(dex_files) + 1): ++ for dex_idx in range(2, len(dex_files) + 1): + archive_name = 'classes%d.dex' % dex_idx + for f in dex_files: + if f.endswith(archive_name): +diff --git a/src/3rdparty/chromium/build/android/gyp/dexsplitter.py b/src/3rdparty/chromium/build/android/gyp/dexsplitter.py +index 34aef0352..a51562f0d 100755 +--- a/src/3rdparty/chromium/build/android/gyp/dexsplitter.py ++++ b/src/3rdparty/chromium/build/android/gyp/dexsplitter.py +@@ -81,7 +81,7 @@ def main(args): + options = _ParseOptions(args) + + input_paths = [options.input_dex_zip] +- for feature_jars in options.features.itervalues(): ++ for feature_jars in options.features.values(): + for feature_jar in feature_jars: + input_paths.append(feature_jar) + +diff --git a/src/3rdparty/chromium/build/android/gyp/extract_unwind_tables.py b/src/3rdparty/chromium/build/android/gyp/extract_unwind_tables.py +index b20f74076..9b3acc049 100755 +--- a/src/3rdparty/chromium/build/android/gyp/extract_unwind_tables.py ++++ b/src/3rdparty/chromium/build/android/gyp/extract_unwind_tables.py +@@ -197,7 +197,7 @@ def _WriteCfiData(cfi_data, out_file): + # Store mapping between the functions to the index. + func_addr_to_index = {} + previous_func_end = 0 +- for addr, function in sorted(cfi_data.iteritems()): ++ for addr, function in sorted(cfi_data.items()): + # Add an empty function entry when functions CFIs are missing between 2 + # functions. 
+ if previous_func_end != 0 and addr - previous_func_end > 4: +@@ -243,7 +243,7 @@ def _WriteCfiData(cfi_data, out_file): + _Write4Bytes(out_file, len(func_addr_to_index)) + + # Write the UNW_INDEX table. First list of addresses and then indices. +- sorted_unw_index = sorted(func_addr_to_index.iteritems()) ++ sorted_unw_index = sorted(func_addr_to_index.items()) + for addr, index in sorted_unw_index: + _Write4Bytes(out_file, addr) + for addr, index in sorted_unw_index: +diff --git a/src/3rdparty/chromium/build/android/gyp/extract_unwind_tables_tests.py b/src/3rdparty/chromium/build/android/gyp/extract_unwind_tables_tests.py +index 7f9d0de73..72698deaf 100755 +--- a/src/3rdparty/chromium/build/android/gyp/extract_unwind_tables_tests.py ++++ b/src/3rdparty/chromium/build/android/gyp/extract_unwind_tables_tests.py +@@ -109,7 +109,7 @@ STACK CFI 3b93218 .cfa: r7 16 + .ra: .cfa -4 + ^ + + func_start = index + 1 + func_end = func_start + unw_data[index] * 2 +- self.assertEquals( ++ self.assertEqual( + len(expected_cfi_data[func_addr]), func_end - func_start) + func_cfi = unw_data[func_start : func_end] + self.assertEqual(expected_cfi_data[func_addr], func_cfi) +diff --git a/src/3rdparty/chromium/build/android/gyp/find.py b/src/3rdparty/chromium/build/android/gyp/find.py +index a78bc83af..ccd08be73 100755 +--- a/src/3rdparty/chromium/build/android/gyp/find.py ++++ b/src/3rdparty/chromium/build/android/gyp/find.py +@@ -7,7 +7,7 @@ + """Finds files in directories. + """ + +-from __future__ import print_function ++ + + import fnmatch + import optparse +diff --git a/src/3rdparty/chromium/build/android/gyp/gcc_preprocess.py b/src/3rdparty/chromium/build/android/gyp/gcc_preprocess.py +index 8c5c404c7..31cc8fc9e 100755 +--- a/src/3rdparty/chromium/build/android/gyp/gcc_preprocess.py ++++ b/src/3rdparty/chromium/build/android/gyp/gcc_preprocess.py +@@ -15,7 +15,7 @@ def DoGcc(options): + + gcc_cmd = [ 'gcc' ] # invoke host gcc. 
+ if options.defines: +- gcc_cmd.extend(sum(map(lambda w: ['-D', w], options.defines), [])) ++ gcc_cmd.extend(sum([['-D', w] for w in options.defines], [])) + + with build_utils.AtomicOutput(options.output) as f: + gcc_cmd.extend([ +diff --git a/src/3rdparty/chromium/build/android/gyp/jacoco_instr.py b/src/3rdparty/chromium/build/android/gyp/jacoco_instr.py +index cb3880c6c..c1e73f6b0 100755 +--- a/src/3rdparty/chromium/build/android/gyp/jacoco_instr.py ++++ b/src/3rdparty/chromium/build/android/gyp/jacoco_instr.py +@@ -13,7 +13,7 @@ jacococli.jar. + + """ + +-from __future__ import print_function ++ + + import argparse + import json +diff --git a/src/3rdparty/chromium/build/android/gyp/java_cpp_enum.py b/src/3rdparty/chromium/build/android/gyp/java_cpp_enum.py +index 0b9ee541e..e758f2142 100755 +--- a/src/3rdparty/chromium/build/android/gyp/java_cpp_enum.py ++++ b/src/3rdparty/chromium/build/android/gyp/java_cpp_enum.py +@@ -71,7 +71,7 @@ class EnumDefinition(object): + # Enums, if given no value, are given the value of the previous enum + 1. 
+ if not all(self.entries.values()): + prev_enum_value = -1 +- for key, value in self.entries.items(): ++ for key, value in list(self.entries.items()): + if not value: + self.entries[key] = prev_enum_value + 1 + elif value in self.entries: +@@ -96,7 +96,7 @@ class EnumDefinition(object): + 'k' + self.original_enum_name] + + for prefix in prefixes: +- if all([w.startswith(prefix) for w in self.entries.keys()]): ++ if all([w.startswith(prefix) for w in list(self.entries.keys())]): + prefix_to_strip = prefix + break + else: +@@ -104,7 +104,7 @@ class EnumDefinition(object): + + def StripEntries(entries): + ret = collections.OrderedDict() +- for k, v in entries.items(): ++ for k, v in list(entries.items()): + stripped_key = k.replace(prefix_to_strip, '', 1) + if isinstance(v, str): + stripped_value = v.replace(prefix_to_strip, '') +@@ -126,7 +126,7 @@ def _TransformKeys(d, func): + """Normalize keys in |d| and update references to old keys in |d| values.""" + keys_map = {k: func(k) for k in d} + ret = collections.OrderedDict() +- for k, v in d.items(): ++ for k, v in list(d.items()): + # Need to transform values as well when the entry value was explicitly set + # (since it could contain references to other enum entry values). 
+ if isinstance(v, str): +@@ -135,7 +135,7 @@ def _TransformKeys(d, func): + if v in d: + v = keys_map[v] + else: +- for old_key, new_key in keys_map.items(): ++ for old_key, new_key in list(keys_map.items()): + v = v.replace(old_key, new_key) + ret[keys_map[k]] = v + return ret +@@ -375,7 +375,7 @@ ${ENUM_ENTRIES} + enum_template = Template(' int ${NAME} = ${VALUE};') + enum_entries_string = [] + enum_names = [] +- for enum_name, enum_value in enum_definition.entries.items(): ++ for enum_name, enum_value in list(enum_definition.entries.items()): + values = { + 'NAME': enum_name, + 'VALUE': enum_value, +diff --git a/src/3rdparty/chromium/build/android/gyp/java_google_api_keys.py b/src/3rdparty/chromium/build/android/gyp/java_google_api_keys.py +index 349821a8f..53b9d85ef 100755 +--- a/src/3rdparty/chromium/build/android/gyp/java_google_api_keys.py ++++ b/src/3rdparty/chromium/build/android/gyp/java_google_api_keys.py +@@ -48,7 +48,7 @@ ${CONSTANT_ENTRIES} + constant_template = string.Template( + ' public static final String ${NAME} = "${VALUE}";') + constant_entries_list = [] +- for constant_name, constant_value in constant_definitions.iteritems(): ++ for constant_name, constant_value in constant_definitions.items(): + values = { + 'NAME': constant_name, + 'VALUE': constant_value, +diff --git a/src/3rdparty/chromium/build/android/gyp/jetify_jar.py b/src/3rdparty/chromium/build/android/gyp/jetify_jar.py +index 491efd4f9..99277b143 100755 +--- a/src/3rdparty/chromium/build/android/gyp/jetify_jar.py ++++ b/src/3rdparty/chromium/build/android/gyp/jetify_jar.py +@@ -4,7 +4,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import print_function ++ + + import argparse + import os +diff --git a/src/3rdparty/chromium/build/android/gyp/lint.py b/src/3rdparty/chromium/build/android/gyp/lint.py +index 1ec6171b8..ce90fee78 100755 +--- a/src/3rdparty/chromium/build/android/gyp/lint.py ++++ b/src/3rdparty/chromium/build/android/gyp/lint.py +@@ -5,7 +5,7 @@ + # found in the LICENSE file. + """Runs Android's lint tool.""" + +-from __future__ import print_function ++ + + import argparse + import functools +diff --git a/src/3rdparty/chromium/build/android/gyp/proto/Resources_pb2.py b/src/3rdparty/chromium/build/android/gyp/proto/Resources_pb2.py +index 79d12b8fe..3bbd7028b 100644 +--- a/src/3rdparty/chromium/build/android/gyp/proto/Resources_pb2.py ++++ b/src/3rdparty/chromium/build/android/gyp/proto/Resources_pb2.py +@@ -13,7 +13,7 @@ from google.protobuf import symbol_database as _symbol_database + _sym_db = _symbol_database.Default() + + +-import Configuration_pb2 as frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2 ++from . 
import Configuration_pb2 as frameworks_dot_base_dot_tools_dot_aapt2_dot_Configuration__pb2 + + + DESCRIPTOR = _descriptor.FileDescriptor( +diff --git a/src/3rdparty/chromium/build/android/gyp/util/build_utils.py b/src/3rdparty/chromium/build/android/gyp/util/build_utils.py +index 022980517..ce6644466 100644 +--- a/src/3rdparty/chromium/build/android/gyp/util/build_utils.py ++++ b/src/3rdparty/chromium/build/android/gyp/util/build_utils.py +@@ -39,7 +39,7 @@ RT_JAR_PATH = os.path.join(DIR_SOURCE_ROOT, 'third_party', 'jdk', 'extras', + 'java_8', 'jre', 'lib', 'rt.jar') + + try: +- string_types = basestring ++ string_types = str + except NameError: + string_types = (str, bytes) + +diff --git a/src/3rdparty/chromium/build/android/gyp/util/build_utils_test.py b/src/3rdparty/chromium/build/android/gyp/util/build_utils_test.py +index d462f0c67..8da06453f 100755 +--- a/src/3rdparty/chromium/build/android/gyp/util/build_utils_test.py ++++ b/src/3rdparty/chromium/build/android/gyp/util/build_utils_test.py +@@ -26,7 +26,7 @@ _DEPS['i'] = ['f'] + + class BuildUtilsTest(unittest.TestCase): + def testGetSortedTransitiveDependencies_all(self): +- TOP = _DEPS.keys() ++ TOP = list(_DEPS.keys()) + EXPECTED = ['a', 'b', 'c', 'd', 'f', 'e', 'g', 'h', 'i'] + actual = build_utils.GetSortedTransitiveDependencies(TOP, _DEPS.get) + self.assertEqual(EXPECTED, actual) +diff --git a/src/3rdparty/chromium/build/android/gyp/util/jar_info_utils.py b/src/3rdparty/chromium/build/android/gyp/util/jar_info_utils.py +index 355bcb090..16a3af6f4 100644 +--- a/src/3rdparty/chromium/build/android/gyp/util/jar_info_utils.py ++++ b/src/3rdparty/chromium/build/android/gyp/util/jar_info_utils.py +@@ -50,7 +50,7 @@ def WriteJarInfoFile(output_obj, info_data, source_file_map=None): + path of Java source files that where extracted from an .srcjar into a + temporary location. 
+ """ +- for fully_qualified_name, path in sorted(info_data.iteritems()): ++ for fully_qualified_name, path in sorted(info_data.items()): + if source_file_map and path in source_file_map: + path = source_file_map[path] + assert not path.startswith('/tmp'), ( +diff --git a/src/3rdparty/chromium/build/android/gyp/util/manifest_utils.py b/src/3rdparty/chromium/build/android/gyp/util/manifest_utils.py +index a9e22f3d8..46763a7f9 100644 +--- a/src/3rdparty/chromium/build/android/gyp/util/manifest_utils.py ++++ b/src/3rdparty/chromium/build/android/gyp/util/manifest_utils.py +@@ -264,14 +264,14 @@ def NormalizeManifest(manifest_contents): + if app_node is not None: + for node in app_node.getchildren(): + if (node.tag in ['uses-static-library', 'static-library'] +- and '{%s}version' % ANDROID_NAMESPACE in node.keys() +- and '{%s}name' % ANDROID_NAMESPACE in node.keys()): ++ and '{%s}version' % ANDROID_NAMESPACE in list(node.keys()) ++ and '{%s}name' % ANDROID_NAMESPACE in list(node.keys())): + node.set('{%s}version' % ANDROID_NAMESPACE, '$VERSION_NUMBER') + + # We also remove the exact package name (except the one at the root level) + # to avoid noise during manifest comparison. + def blur_package_name(node): +- for key in node.keys(): ++ for key in list(node.keys()): + node.set(key, node.get(key).replace(package, '$PACKAGE')) + + for child in node.getchildren(): +diff --git a/src/3rdparty/chromium/build/android/gyp/util/md5_check.py b/src/3rdparty/chromium/build/android/gyp/util/md5_check.py +index 2830d25c9..d004adc48 100644 +--- a/src/3rdparty/chromium/build/android/gyp/util/md5_check.py ++++ b/src/3rdparty/chromium/build/android/gyp/util/md5_check.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import print_function ++ + + import difflib + import hashlib +diff --git a/src/3rdparty/chromium/build/android/gyp/util/parallel.py b/src/3rdparty/chromium/build/android/gyp/util/parallel.py +index 082ad9722..c26875a71 100644 +--- a/src/3rdparty/chromium/build/android/gyp/util/parallel.py ++++ b/src/3rdparty/chromium/build/android/gyp/util/parallel.py +@@ -205,7 +205,7 @@ def BulkForkAndCall(func, arg_tuples, **kwargs): + pool = _MakeProcessPool(arg_tuples, **kwargs) + wrapped_func = _FuncWrapper(func) + try: +- for result in pool.imap(wrapped_func, xrange(len(arg_tuples))): ++ for result in pool.imap(wrapped_func, range(len(arg_tuples))): + _CheckForException(result) + yield result + finally: +diff --git a/src/3rdparty/chromium/build/android/gyp/util/resource_utils.py b/src/3rdparty/chromium/build/android/gyp/util/resource_utils.py +index 7b83981c3..9d7289e64 100644 +--- a/src/3rdparty/chromium/build/android/gyp/util/resource_utils.py ++++ b/src/3rdparty/chromium/build/android/gyp/util/resource_utils.py +@@ -305,7 +305,7 @@ class ResourceInfoFile(object): + """ + entries = self._ApplyRenames() + lines = [] +- for archive_path, source_path in entries.iteritems(): ++ for archive_path, source_path in entries.items(): + lines.append('{}\t{}\n'.format(archive_path, source_path)) + with open(info_file_path, 'w') as info_file: + info_file.writelines(sorted(lines)) +@@ -648,7 +648,7 @@ def _RenderRootRJavaSource(package, all_resources_by_type, rjava_build_options, + """Render an R.java source file. See _CreateRJaveSourceFile for args info.""" + final_resources_by_type = collections.defaultdict(list) + non_final_resources_by_type = collections.defaultdict(list) +- for res_type, resources in all_resources_by_type.iteritems(): ++ for res_type, resources in all_resources_by_type.items(): + for entry in resources: + # Entries in stylable that are not int[] are not actually resource ids + # but constants. 
+@@ -1021,13 +1021,13 @@ def GenerateAndroidResourceStringsXml(names_to_utf8_text, namespaces=None): + result = '\n' + result += ' 3) + http.request('HEAD', '/') +@@ -137,7 +137,7 @@ class LighttpdServer(object): + client_error = ('Bad response: %s %s version %s\n ' % + (r.status, r.reason, r.version) + + '\n '.join([': '.join(h) for h in r.getheaders()])) +- except (httplib.HTTPException, socket.error) as client_error: ++ except (http.client.HTTPException, socket.error) as client_error: + pass # Probably too quick connecting: try again + # Check for server startup error messages + # pylint: disable=no-member +@@ -248,7 +248,7 @@ def main(argv): + server = LighttpdServer(*argv[1:]) + try: + if server.StartupHttpServer(): +- raw_input('Server running at http://127.0.0.1:%s -' ++ input('Server running at http://127.0.0.1:%s -' + ' press Enter to exit it.' % server.port) + else: + print('Server exit code:', server.process.exitstatus) +diff --git a/src/3rdparty/chromium/build/android/list_class_verification_failures.py b/src/3rdparty/chromium/build/android/list_class_verification_failures.py +index ff0218045..971b72d70 100755 +--- a/src/3rdparty/chromium/build/android/list_class_verification_failures.py ++++ b/src/3rdparty/chromium/build/android/list_class_verification_failures.py +@@ -9,7 +9,7 @@ This is a wrapper around the device's oatdump executable, parsing desired output + and accommodating API-level-specific details, such as file paths. 
+ """ + +-from __future__ import print_function ++ + + import argparse + import exceptions +diff --git a/src/3rdparty/chromium/build/android/method_count.py b/src/3rdparty/chromium/build/android/method_count.py +index 7b0b29285..456c2108d 100755 +--- a/src/3rdparty/chromium/build/android/method_count.py ++++ b/src/3rdparty/chromium/build/android/method_count.py +@@ -27,10 +27,10 @@ _CONTRIBUTORS_TO_DEX_CACHE = { + + def _ExtractSizesFromDexFile(dexfile): + count_by_item = {} +- for item_name, readable_name in _CONTRIBUTORS_TO_DEX_CACHE.iteritems(): ++ for item_name, readable_name in _CONTRIBUTORS_TO_DEX_CACHE.items(): + count_by_item[readable_name] = getattr(dexfile.header, item_name) + return count_by_item, sum( +- count_by_item[x] for x in _CONTRIBUTORS_TO_DEX_CACHE.itervalues()) * 4 ++ count_by_item[x] for x in _CONTRIBUTORS_TO_DEX_CACHE.values()) * 4 + + + def ExtractSizesFromZip(path): +@@ -44,11 +44,11 @@ def ExtractSizesFromZip(path): + dexfile_name = os.path.basename(subpath) + dexfiles[dexfile_name] = dex_parser.DexFile(bytearray(z.read(subpath))) + +- for dexfile_name, dexfile in dexfiles.iteritems(): ++ for dexfile_name, dexfile in dexfiles.items(): + cur_dex_counts, cur_dexcache_size = _ExtractSizesFromDexFile(dexfile) + dex_counts_by_file[dexfile_name] = cur_dex_counts + dexcache_size += cur_dexcache_size +- num_unique_methods = dex_parser.CountUniqueDexMethods(dexfiles.values()) ++ num_unique_methods = dex_parser.CountUniqueDexMethods(list(dexfiles.values())) + return dex_counts_by_file, dexcache_size, num_unique_methods + + +@@ -68,8 +68,8 @@ def main(): + num_unique_methods = single_set_of_sizes['methods'] + + file_basename = os.path.basename(args.filename) +- for classes_dex_file, classes_dex_sizes in sizes.iteritems(): +- for readable_name in _CONTRIBUTORS_TO_DEX_CACHE.itervalues(): ++ for classes_dex_file, classes_dex_sizes in sizes.items(): ++ for readable_name in _CONTRIBUTORS_TO_DEX_CACHE.values(): + if readable_name in classes_dex_sizes: + 
perf_tests_results_helper.PrintPerfResult( + '%s_%s_%s' % (file_basename, classes_dex_file, readable_name), +diff --git a/src/3rdparty/chromium/build/android/pylib/base/base_test_result.py b/src/3rdparty/chromium/build/android/pylib/base/base_test_result.py +index bb4f6fab9..139e7a168 100644 +--- a/src/3rdparty/chromium/build/android/pylib/base/base_test_result.py ++++ b/src/3rdparty/chromium/build/android/pylib/base/base_test_result.py +@@ -139,7 +139,7 @@ class TestRunResults(object): + log = t.GetLog() + if log: + s.append('[%s] %s:' % (test_type, t)) +- s.append(unicode(log, 'utf-8')) ++ s.append(str(log, 'utf-8')) + return '\n'.join(s) + + def GetGtestForm(self): +diff --git a/src/3rdparty/chromium/build/android/pylib/constants/host_paths_unittest.py b/src/3rdparty/chromium/build/android/pylib/constants/host_paths_unittest.py +index 658ed08bd..a84ac0a41 100755 +--- a/src/3rdparty/chromium/build/android/pylib/constants/host_paths_unittest.py ++++ b/src/3rdparty/chromium/build/android/pylib/constants/host_paths_unittest.py +@@ -40,7 +40,7 @@ class HostPathsTest(unittest.TestCase): + self.assertEqual(host_paths.GetAaptPath(), _EXPECTED_AAPT_PATH) + + def test_ToolPath(self): +- for cpu_arch, binprefix in _EXPECTED_NDK_TOOL_SUBDIR_MAP.iteritems(): ++ for cpu_arch, binprefix in _EXPECTED_NDK_TOOL_SUBDIR_MAP.items(): + expected_binprefix = os.path.join(constants.ANDROID_NDK_ROOT, binprefix) + expected_path = expected_binprefix + 'foo' + self.assertEqual(host_paths.ToolPath('foo', cpu_arch), expected_path) +diff --git a/src/3rdparty/chromium/build/android/pylib/content_settings.py b/src/3rdparty/chromium/build/android/pylib/content_settings.py +index 3bf11bc49..5ea7c525e 100644 +--- a/src/3rdparty/chromium/build/android/pylib/content_settings.py ++++ b/src/3rdparty/chromium/build/android/pylib/content_settings.py +@@ -23,7 +23,7 @@ class ContentSettings(dict): + return 'f' + if isinstance(value, int): + return 'i' +- if isinstance(value, long): ++ if 
isinstance(value, int): + return 'l' + if isinstance(value, str): + return 's' +diff --git a/src/3rdparty/chromium/build/android/pylib/device_settings.py b/src/3rdparty/chromium/build/android/pylib/device_settings.py +index ab4ad1b90..a9830af0b 100644 +--- a/src/3rdparty/chromium/build/android/pylib/device_settings.py ++++ b/src/3rdparty/chromium/build/android/pylib/device_settings.py +@@ -33,7 +33,7 @@ def ConfigureContentSettings(device, desired_settings): + for key, value in key_value: + settings[key] = value + logging.info('\n%s %s', table, (80 - len(table)) * '-') +- for key, value in sorted(settings.iteritems()): ++ for key, value in sorted(settings.items()): + logging.info('\t%s: %s', key, value) + + +diff --git a/src/3rdparty/chromium/build/android/pylib/dex/dex_parser.py b/src/3rdparty/chromium/build/android/pylib/dex/dex_parser.py +index 5ddd6e270..e9d4d5f9e 100755 +--- a/src/3rdparty/chromium/build/android/pylib/dex/dex_parser.py ++++ b/src/3rdparty/chromium/build/android/pylib/dex/dex_parser.py +@@ -10,7 +10,7 @@ A DexFile class that exposes access to several memory items in the dex format + is provided, but it does not include error handling or validation. 
+ """ + +-from __future__ import print_function ++ + + import argparse + import collections +@@ -91,7 +91,7 @@ class _MemoryItemList(object): + self.offset = offset + self.size = size + reader.Seek(first_item_offset or offset) +- self._items = [factory(reader) for _ in xrange(size)] ++ self._items = [factory(reader) for _ in range(size)] + + if alignment: + reader.AlignUpTo(alignment) +@@ -141,7 +141,7 @@ class _StringItemList(_MemoryItemList): + + def __init__(self, reader, offset, size): + reader.Seek(offset) +- string_item_offsets = iter([reader.ReadUInt() for _ in xrange(size)]) ++ string_item_offsets = iter([reader.ReadUInt() for _ in range(size)]) + + def factory(x): + data_offset = next(string_item_offsets) +@@ -182,7 +182,7 @@ class _ClassDefItemList(_MemoryItemList): + + def factory(x): + return _ClassDefItem(*(x.ReadUInt() +- for _ in xrange(len(_ClassDefItem._fields)))) ++ for _ in range(len(_ClassDefItem._fields)))) + + super(_ClassDefItemList, self).__init__(reader, offset, size, factory) + +@@ -209,7 +209,7 @@ class _DexMapList(object): + self._map = {} + reader.Seek(offset) + self._size = reader.ReadUInt() +- for _ in xrange(self._size): ++ for _ in range(self._size): + item = _DexMapItem(reader) + self._map[item.type] = item + +@@ -299,7 +299,7 @@ class _DexReader(object): + self.Seek(offset) + ret = '' + +- for _ in xrange(string_length): ++ for _ in range(string_length): + a = self.ReadUByte() + if a == 0: + raise _MUTf8DecodeError('Early string termination encountered', +@@ -320,7 +320,7 @@ class _DexReader(object): + else: + raise _MUTf8DecodeError('Bad byte', string_length, offset) + +- ret += unichr(code) ++ ret += chr(code) + + if self.ReadUByte() != 0x00: + raise _MUTf8DecodeError('Expected string termination', string_length, +@@ -419,7 +419,7 @@ class DexFile(object): + def ResolveClassAccessFlags(access_flags): + return tuple( + flag_string +- for flag, flag_string in DexFile._CLASS_ACCESS_FLAGS.iteritems() ++ for flag, flag_string in 
DexFile._CLASS_ACCESS_FLAGS.items() + if flag & access_flags) + + def __repr__(self): +diff --git a/src/3rdparty/chromium/build/android/pylib/gtest/gtest_test_instance.py b/src/3rdparty/chromium/build/android/pylib/gtest/gtest_test_instance.py +index 6fe18bec0..ffa530b0b 100644 +--- a/src/3rdparty/chromium/build/android/pylib/gtest/gtest_test_instance.py ++++ b/src/3rdparty/chromium/build/android/pylib/gtest/gtest_test_instance.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-import HTMLParser ++import html.parser + import json + import logging + import os +@@ -233,7 +233,7 @@ def ParseGTestXML(xml_content): + if not xml_content: + return results + +- html = HTMLParser.HTMLParser() ++ html = html.parser.HTMLParser() + + testsuites = xml.etree.ElementTree.fromstring(xml_content) + for testsuite in testsuites: +@@ -263,7 +263,7 @@ def ParseGTestJSON(json_content): + + json_data = json.loads(json_content) + +- openstack = json_data['tests'].items() ++ openstack = list(json_data['tests'].items()) + + while openstack: + name, value = openstack.pop() +@@ -273,7 +273,7 @@ def ParseGTestJSON(json_content): + 'actual'] == 'PASS' else base_test_result.ResultType.FAIL + results.append(base_test_result.BaseTestResult(name, result_type)) + else: +- openstack += [("%s.%s" % (name, k), v) for k, v in value.iteritems()] ++ openstack += [("%s.%s" % (name, k), v) for k, v in value.items()] + + return results + +diff --git a/src/3rdparty/chromium/build/android/pylib/gtest/gtest_test_instance_test.py b/src/3rdparty/chromium/build/android/pylib/gtest/gtest_test_instance_test.py +index 2a8c9b98e..cddfd4c59 100755 +--- a/src/3rdparty/chromium/build/android/pylib/gtest/gtest_test_instance_test.py ++++ b/src/3rdparty/chromium/build/android/pylib/gtest/gtest_test_instance_test.py +@@ -99,10 +99,10 @@ class GtestTestInstanceTests(unittest.TestCase): + '[ OK ] FooTest.Bar (1 ms)', + ] + actual = 
gtest_test_instance.ParseGTestOutput(raw_output, None, None) +- self.assertEquals(1, len(actual)) +- self.assertEquals('FooTest.Bar', actual[0].GetName()) +- self.assertEquals(1, actual[0].GetDuration()) +- self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType()) ++ self.assertEqual(1, len(actual)) ++ self.assertEqual('FooTest.Bar', actual[0].GetName()) ++ self.assertEqual(1, actual[0].GetDuration()) ++ self.assertEqual(base_test_result.ResultType.PASS, actual[0].GetType()) + + def testParseGTestOutput_fail(self): + raw_output = [ +@@ -110,10 +110,10 @@ class GtestTestInstanceTests(unittest.TestCase): + '[ FAILED ] FooTest.Bar (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) +- self.assertEquals(1, len(actual)) +- self.assertEquals('FooTest.Bar', actual[0].GetName()) +- self.assertEquals(1, actual[0].GetDuration()) +- self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType()) ++ self.assertEqual(1, len(actual)) ++ self.assertEqual('FooTest.Bar', actual[0].GetName()) ++ self.assertEqual(1, actual[0].GetDuration()) ++ self.assertEqual(base_test_result.ResultType.FAIL, actual[0].GetType()) + + def testParseGTestOutput_crash(self): + raw_output = [ +@@ -121,10 +121,10 @@ class GtestTestInstanceTests(unittest.TestCase): + '[ CRASHED ] FooTest.Bar (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) +- self.assertEquals(1, len(actual)) +- self.assertEquals('FooTest.Bar', actual[0].GetName()) +- self.assertEquals(1, actual[0].GetDuration()) +- self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType()) ++ self.assertEqual(1, len(actual)) ++ self.assertEqual('FooTest.Bar', actual[0].GetName()) ++ self.assertEqual(1, actual[0].GetDuration()) ++ self.assertEqual(base_test_result.ResultType.CRASH, actual[0].GetType()) + + def testParseGTestOutput_errorCrash(self): + raw_output = [ +@@ -132,20 +132,20 @@ class GtestTestInstanceTests(unittest.TestCase): + 
'[ERROR:blah] Currently running: FooTest.Bar', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) +- self.assertEquals(1, len(actual)) +- self.assertEquals('FooTest.Bar', actual[0].GetName()) +- self.assertEquals(0, actual[0].GetDuration()) +- self.assertEquals(base_test_result.ResultType.CRASH, actual[0].GetType()) ++ self.assertEqual(1, len(actual)) ++ self.assertEqual('FooTest.Bar', actual[0].GetName()) ++ self.assertEqual(0, actual[0].GetDuration()) ++ self.assertEqual(base_test_result.ResultType.CRASH, actual[0].GetType()) + + def testParseGTestOutput_unknown(self): + raw_output = [ + '[ RUN ] FooTest.Bar', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) +- self.assertEquals(1, len(actual)) +- self.assertEquals('FooTest.Bar', actual[0].GetName()) +- self.assertEquals(0, actual[0].GetDuration()) +- self.assertEquals(base_test_result.ResultType.UNKNOWN, actual[0].GetType()) ++ self.assertEqual(1, len(actual)) ++ self.assertEqual('FooTest.Bar', actual[0].GetName()) ++ self.assertEqual(0, actual[0].GetDuration()) ++ self.assertEqual(base_test_result.ResultType.UNKNOWN, actual[0].GetType()) + + def testParseGTestOutput_nonterminalUnknown(self): + raw_output = [ +@@ -154,15 +154,15 @@ class GtestTestInstanceTests(unittest.TestCase): + '[ OK ] FooTest.Baz (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) +- self.assertEquals(2, len(actual)) ++ self.assertEqual(2, len(actual)) + +- self.assertEquals('FooTest.Bar', actual[0].GetName()) +- self.assertEquals(0, actual[0].GetDuration()) +- self.assertEquals(base_test_result.ResultType.UNKNOWN, actual[0].GetType()) ++ self.assertEqual('FooTest.Bar', actual[0].GetName()) ++ self.assertEqual(0, actual[0].GetDuration()) ++ self.assertEqual(base_test_result.ResultType.UNKNOWN, actual[0].GetType()) + +- self.assertEquals('FooTest.Baz', actual[1].GetName()) +- self.assertEquals(1, actual[1].GetDuration()) +- 
self.assertEquals(base_test_result.ResultType.PASS, actual[1].GetType()) ++ self.assertEqual('FooTest.Baz', actual[1].GetName()) ++ self.assertEqual(1, actual[1].GetDuration()) ++ self.assertEqual(base_test_result.ResultType.PASS, actual[1].GetType()) + + def testParseGTestOutput_deathTestCrashOk(self): + raw_output = [ +@@ -171,11 +171,11 @@ class GtestTestInstanceTests(unittest.TestCase): + '[ OK ] FooTest.Bar (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) +- self.assertEquals(1, len(actual)) ++ self.assertEqual(1, len(actual)) + +- self.assertEquals('FooTest.Bar', actual[0].GetName()) +- self.assertEquals(1, actual[0].GetDuration()) +- self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType()) ++ self.assertEqual('FooTest.Bar', actual[0].GetName()) ++ self.assertEqual(1, actual[0].GetDuration()) ++ self.assertEqual(base_test_result.ResultType.PASS, actual[0].GetType()) + + def testParseGTestOutput_typeParameterized(self): + raw_output = [ +@@ -183,10 +183,10 @@ class GtestTestInstanceTests(unittest.TestCase): + '[ FAILED ] Baz/FooTest.Bar/0, where TypeParam = (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) +- self.assertEquals(1, len(actual)) +- self.assertEquals('Baz/FooTest.Bar/0', actual[0].GetName()) +- self.assertEquals(1, actual[0].GetDuration()) +- self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType()) ++ self.assertEqual(1, len(actual)) ++ self.assertEqual('Baz/FooTest.Bar/0', actual[0].GetName()) ++ self.assertEqual(1, actual[0].GetDuration()) ++ self.assertEqual(base_test_result.ResultType.FAIL, actual[0].GetType()) + + def testParseGTestOutput_valueParameterized(self): + raw_output = [ +@@ -195,10 +195,10 @@ class GtestTestInstanceTests(unittest.TestCase): + ' where GetParam() = 4-byte object <00-00 00-00> (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) +- self.assertEquals(1, len(actual)) +- 
self.assertEquals('Baz/FooTest.Bar/0', actual[0].GetName()) +- self.assertEquals(1, actual[0].GetDuration()) +- self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType()) ++ self.assertEqual(1, len(actual)) ++ self.assertEqual('Baz/FooTest.Bar/0', actual[0].GetName()) ++ self.assertEqual(1, actual[0].GetDuration()) ++ self.assertEqual(base_test_result.ResultType.FAIL, actual[0].GetType()) + + def testParseGTestOutput_typeAndValueParameterized(self): + raw_output = [ +@@ -207,18 +207,18 @@ class GtestTestInstanceTests(unittest.TestCase): + ' where TypeParam = and GetParam() = (1 ms)', + ] + actual = gtest_test_instance.ParseGTestOutput(raw_output, None, None) +- self.assertEquals(1, len(actual)) +- self.assertEquals('Baz/FooTest.Bar/0', actual[0].GetName()) +- self.assertEquals(1, actual[0].GetDuration()) +- self.assertEquals(base_test_result.ResultType.FAIL, actual[0].GetType()) ++ self.assertEqual(1, len(actual)) ++ self.assertEqual('Baz/FooTest.Bar/0', actual[0].GetName()) ++ self.assertEqual(1, actual[0].GetDuration()) ++ self.assertEqual(base_test_result.ResultType.FAIL, actual[0].GetType()) + + def testParseGTestXML_none(self): + actual = gtest_test_instance.ParseGTestXML(None) +- self.assertEquals([], actual) ++ self.assertEqual([], actual) + + def testParseGTestJSON_none(self): + actual = gtest_test_instance.ParseGTestJSON(None) +- self.assertEquals([], actual) ++ self.assertEqual([], actual) + + def testParseGTestJSON_example(self): + raw_json = """ +@@ -253,10 +253,10 @@ class GtestTestInstanceTests(unittest.TestCase): + } + }""" + actual = gtest_test_instance.ParseGTestJSON(raw_json) +- self.assertEquals(1, len(actual)) +- self.assertEquals('mojom_tests.parse.ast_unittest.ASTTest.testNodeBase', ++ self.assertEqual(1, len(actual)) ++ self.assertEqual('mojom_tests.parse.ast_unittest.ASTTest.testNodeBase', + actual[0].GetName()) +- self.assertEquals(base_test_result.ResultType.PASS, actual[0].GetType()) ++ 
self.assertEqual(base_test_result.ResultType.PASS, actual[0].GetType()) + + def testTestNameWithoutDisabledPrefix_disabled(self): + test_name_list = [ +@@ -268,7 +268,7 @@ class GtestTestInstanceTests(unittest.TestCase): + actual = gtest_test_instance \ + .TestNameWithoutDisabledPrefix(test_name) + expected = 'A.B' +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testTestNameWithoutDisabledPrefix_flaky(self): + test_name_list = [ +@@ -280,14 +280,14 @@ class GtestTestInstanceTests(unittest.TestCase): + actual = gtest_test_instance \ + .TestNameWithoutDisabledPrefix(test_name) + expected = 'A.B' +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testTestNameWithoutDisabledPrefix_notDisabledOrFlaky(self): + test_name = 'A.B' + actual = gtest_test_instance \ + .TestNameWithoutDisabledPrefix(test_name) + expected = 'A.B' +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py b/src/3rdparty/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py +index ae70fbfbd..eec4d58ba 100644 +--- a/src/3rdparty/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py ++++ b/src/3rdparty/chromium/build/android/pylib/instrumentation/instrumentation_test_instance.py +@@ -199,7 +199,7 @@ def GenerateTestResults(result_code, result_bundle, statuses, duration_ms, + if current_result.GetType() == base_test_result.ResultType.UNKNOWN: + crashed = (result_code == _ACTIVITY_RESULT_CANCELED + and any(_NATIVE_CRASH_RE.search(l) +- for l in result_bundle.itervalues())) ++ for l in result_bundle.values())) + if crashed: + current_result.SetType(base_test_result.ResultType.CRASH) + +@@ -402,8 +402,8 @@ def _GetTestsFromDexdump(test_apk): + } for m in methods if m.startswith('test')] + + for dump in dex_dumps: +- for package_name, 
package_info in dump.iteritems(): +- for class_name, class_info in package_info['classes'].iteritems(): ++ for package_name, package_info in dump.items(): ++ for class_name, class_info in package_info['classes'].items(): + if class_name.endswith('Test'): + tests.append({ + 'class': '%s.%s' % (package_name, class_name), +@@ -664,7 +664,7 @@ class InstrumentationTestInstance(test_instance.TestInstance): + self._package_info = None + if self._apk_under_test: + package_under_test = self._apk_under_test.GetPackageName() +- for package_info in constants.PACKAGE_INFO.itervalues(): ++ for package_info in constants.PACKAGE_INFO.values(): + if package_under_test == package_info.package: + self._package_info = package_info + break +@@ -1021,7 +1021,7 @@ class InstrumentationTestInstance(test_instance.TestInstance): + elif clazz == _PARAMETERIZED_COMMAND_LINE_FLAGS: + list_of_switches = [] + for annotation in methods['value']: +- for clazz, methods in annotation.iteritems(): ++ for clazz, methods in annotation.items(): + list_of_switches += _annotationToSwitches(clazz, methods) + return list_of_switches + else: +@@ -1039,7 +1039,7 @@ class InstrumentationTestInstance(test_instance.TestInstance): + list_of_switches = [] + _checkParameterization(annotations) + if _SKIP_PARAMETERIZATION not in annotations: +- for clazz, methods in annotations.iteritems(): ++ for clazz, methods in annotations.items(): + list_of_switches += _annotationToSwitches(clazz, methods) + if list_of_switches: + _setTestFlags(t, _switchesToFlags(list_of_switches[0])) +diff --git a/src/3rdparty/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/src/3rdparty/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py +index fdb4114a6..db2b4760c 100755 +--- a/src/3rdparty/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py ++++ b/src/3rdparty/chromium/build/android/pylib/instrumentation/instrumentation_test_instance_test.py 
+@@ -60,7 +60,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o = self.createTestInstance() + args = self.createFlagAttributesArgs(command_line_flags=['--foo', '--bar']) + o._initializeFlagAttributes(args) +- self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar']) ++ self.assertEqual(o._flags, ['--enable-test-intents', '--foo', '--bar']) + + def test_initializeFlagAttributes_deviceFlagsFile(self): + o = self.createTestInstance() +@@ -70,26 +70,26 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + + args = self.createFlagAttributesArgs(device_flags_file=flags_file.name) + o._initializeFlagAttributes(args) +- self.assertEquals(o._flags, ['--enable-test-intents', '--foo', '--bar']) ++ self.assertEqual(o._flags, ['--enable-test-intents', '--foo', '--bar']) + + def test_initializeFlagAttributes_strictModeOn(self): + o = self.createTestInstance() + args = self.createFlagAttributesArgs(strict_mode='on') + o._initializeFlagAttributes(args) +- self.assertEquals(o._flags, ['--enable-test-intents', '--strict-mode=on']) ++ self.assertEqual(o._flags, ['--enable-test-intents', '--strict-mode=on']) + + def test_initializeFlagAttributes_strictModeOn_coverageOn(self): + o = self.createTestInstance() + args = self.createFlagAttributesArgs( + strict_mode='on', coverage_dir='/coverage/dir') + o._initializeFlagAttributes(args) +- self.assertEquals(o._flags, ['--enable-test-intents']) ++ self.assertEqual(o._flags, ['--enable-test-intents']) + + def test_initializeFlagAttributes_strictModeOff(self): + o = self.createTestInstance() + args = self.createFlagAttributesArgs(strict_mode='off') + o._initializeFlagAttributes(args) +- self.assertEquals(o._flags, ['--enable-test-intents']) ++ self.assertEqual(o._flags, ['--enable-test-intents']) + + def testGetTests_noFilter(self): + o = self.createTestInstance() +@@ -156,7 +156,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._junit4_runner_class = 'J4Runner' + actual_tests = 
o.ProcessRawTests(raw_tests) + +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testGetTests_simpleGtestFilter(self): + o = self.createTestInstance() +@@ -195,7 +195,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testGetTests_simpleGtestUnqualifiedNameFilter(self): + o = self.createTestInstance() +@@ -234,7 +234,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testGetTests_parameterizedTestGtestFilter(self): + o = self.createTestInstance() +@@ -293,7 +293,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._test_filter = 'org.chromium.test.SampleTest.testMethod1' + actual_tests = o.ProcessRawTests(raw_tests) + +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testGetTests_wildcardGtestFilter(self): + o = self.createTestInstance() +@@ -343,7 +343,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testGetTests_negativeGtestFilter(self): + o = self.createTestInstance() +@@ -402,7 +402,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testGetTests_annotationFilter(self): + o = self.createTestInstance() +@@ -461,7 
+461,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testGetTests_excludedAnnotationFilter(self): + o = self.createTestInstance() +@@ -513,7 +513,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testGetTests_annotationSimpleValueFilter(self): + o = self.createTestInstance() +@@ -575,7 +575,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testGetTests_annotationDictValueFilter(self): + o = self.createTestInstance() +@@ -625,7 +625,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testGetTestName(self): + test = { +@@ -643,10 +643,10 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + 'method': test['method'] + } + +- self.assertEquals( ++ self.assertEqual( + instrumentation_test_instance.GetTestName(test, sep='.'), + 'org.chromium.TestA.testSimple') +- self.assertEquals( ++ self.assertEqual( + instrumentation_test_instance.GetTestName( + unqualified_class_test, sep='.'), + 'TestA.testSimple') +@@ -662,7 +662,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + 'flags': ['enable_features=abc'], + 'is_junit4': True, + 'method': 'testSimple'} +- self.assertEquals( ++ self.assertEqual( + 
instrumentation_test_instance.GetUniqueTestName( + test, sep='.'), + 'org.chromium.TestA.testSimple_with_enable_features=abc') +@@ -682,11 +682,11 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + 'class': test['class'].split('.')[-1], + 'method': test['method'] + } +- self.assertEquals( ++ self.assertEqual( + instrumentation_test_instance.GetTestNameWithoutParameterPostfix( + test, sep='.'), + 'org.chromium.TestA') +- self.assertEquals( ++ self.assertEqual( + instrumentation_test_instance.GetTestNameWithoutParameterPostfix( + unqualified_class_test, sep='.'), + 'TestA') +@@ -755,7 +755,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) + +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testGenerateTestResults_noStatus(self): + results = instrumentation_test_instance.GenerateTestResults( +@@ -952,7 +952,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._test_jar = 'path/to/test.jar' + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testParameterizedCommandLineFlags(self): + o = self.createTestInstance() +@@ -1075,7 +1075,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._test_jar = 'path/to/test.jar' + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) +- self.assertEquals(actual_tests, expected_tests) ++ self.assertEqual(actual_tests, expected_tests) + + def testDifferentCommandLineParameterizations(self): + o = self.createTestInstance() +@@ -1136,7 +1136,7 @@ class InstrumentationTestInstanceTest(unittest.TestCase): + o._test_jar = 'path/to/test.jar' + o._junit4_runner_class = 'J4Runner' + actual_tests = o.ProcessRawTests(raw_tests) +- self.assertEquals(actual_tests, expected_tests) ++ 
self.assertEqual(actual_tests, expected_tests) + + def testMultipleCommandLineParameterizations_raises(self): + o = self.createTestInstance() +diff --git a/src/3rdparty/chromium/build/android/pylib/local/device/local_device_gtest_run.py b/src/3rdparty/chromium/build/android/pylib/local/device/local_device_gtest_run.py +index 4f1bd1e96..e9f3623e3 100644 +--- a/src/3rdparty/chromium/build/android/pylib/local/device/local_device_gtest_run.py ++++ b/src/3rdparty/chromium/build/android/pylib/local/device/local_device_gtest_run.py +@@ -488,11 +488,11 @@ class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun): + + batch_size = self._test_instance.test_launcher_batch_limit + +- for i in xrange(0, device_count): ++ for i in range(0, device_count): + unbounded_shard = tests[i::device_count] + shards += [ + unbounded_shard[j:j + batch_size] +- for j in xrange(0, len(unbounded_shard), batch_size) ++ for j in range(0, len(unbounded_shard), batch_size) + ] + return shards + +diff --git a/src/3rdparty/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/src/3rdparty/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py +index 191fecab7..14c8c2f29 100644 +--- a/src/3rdparty/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py ++++ b/src/3rdparty/chromium/build/android/pylib/local/device/local_device_instrumentation_test_run.py +@@ -121,7 +121,7 @@ def DidPackageCrashOnDevice(package_name, device): + # Dismiss any error dialogs. Limit the number in case we have an error + # loop or we are failing to dismiss. 
+ try: +- for _ in xrange(10): ++ for _ in range(10): + package = device.DismissCrashDialogIfNeeded(timeout=10, retries=1) + if not package: + return False +@@ -492,7 +492,7 @@ class LocalDeviceInstrumentationTestRun( + other_tests.append(test) + + all_tests = [] +- for _, tests in batched_tests.items(): ++ for _, tests in list(batched_tests.items()): + tests.sort() # Ensure a consistent ordering across external shards. + all_tests.extend([ + tests[i:i + _TEST_BATCH_MAX_GROUP_SIZE] +@@ -558,7 +558,7 @@ class LocalDeviceInstrumentationTestRun( + i = self._GetTimeoutFromAnnotations(t['annotations'], n) + return (n, i) + +- test_names, timeouts = zip(*(name_and_timeout(t) for t in test)) ++ test_names, timeouts = list(zip(*(name_and_timeout(t) for t in test))) + + test_name = instrumentation_test_instance.GetTestName(test[0]) + '_batch' + extras['class'] = ','.join(test_names) +diff --git a/src/3rdparty/chromium/build/android/pylib/local/device/local_device_test_run.py b/src/3rdparty/chromium/build/android/pylib/local/device/local_device_test_run.py +index 69b271865..c74cfa426 100644 +--- a/src/3rdparty/chromium/build/android/pylib/local/device/local_device_test_run.py ++++ b/src/3rdparty/chromium/build/android/pylib/local/device/local_device_test_run.py +@@ -6,7 +6,7 @@ import fnmatch + import logging + import posixpath + import signal +-import thread ++import _thread + import threading + + from devil import base_error +@@ -65,7 +65,7 @@ class LocalDeviceTestRun(test_run.TestRun): + consecutive_device_errors = 0 + for test in tests: + if exit_now.isSet(): +- thread.exit() ++ _thread.exit() + + result = None + rerun = None +@@ -220,13 +220,13 @@ class LocalDeviceTestRun(test_run.TestRun): + if name.endswith('*'): + tests_and_results[name] = ( + test, +- [r for n, r in all_test_results.iteritems() ++ [r for n, r in all_test_results.items() + if fnmatch.fnmatch(n, name)]) + else: + tests_and_results[name] = (test, all_test_results.get(name)) + + failed_tests_and_results 
= ( +- (test, result) for test, result in tests_and_results.itervalues() ++ (test, result) for test, result in tests_and_results.values() + if is_failure_result(result) + ) + +diff --git a/src/3rdparty/chromium/build/android/pylib/local/device/local_device_test_run_test.py b/src/3rdparty/chromium/build/android/pylib/local/device/local_device_test_run_test.py +index aeea5881c..55e2b772e 100755 +--- a/src/3rdparty/chromium/build/android/pylib/local/device/local_device_test_run_test.py ++++ b/src/3rdparty/chromium/build/android/pylib/local/device/local_device_test_run_test.py +@@ -16,25 +16,25 @@ import mock # pylint: disable=import-error + class SubstituteDeviceRootTest(unittest.TestCase): + + def testNoneDevicePath(self): +- self.assertEquals( ++ self.assertEqual( + '/fake/device/root', + local_device_test_run.SubstituteDeviceRoot( + None, '/fake/device/root')) + + def testStringDevicePath(self): +- self.assertEquals( ++ self.assertEqual( + '/another/fake/device/path', + local_device_test_run.SubstituteDeviceRoot( + '/another/fake/device/path', '/fake/device/root')) + + def testListWithNoneDevicePath(self): +- self.assertEquals( ++ self.assertEqual( + '/fake/device/root/subpath', + local_device_test_run.SubstituteDeviceRoot( + [None, 'subpath'], '/fake/device/root')) + + def testListWithoutNoneDevicePath(self): +- self.assertEquals( ++ self.assertEqual( + '/another/fake/device/path', + local_device_test_run.SubstituteDeviceRoot( + ['/', 'another', 'fake', 'device', 'path'], +@@ -79,7 +79,7 @@ class LocalDeviceTestRunTest(unittest.TestCase): + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) +- self.assertEquals(0, len(tests_to_retry)) ++ self.assertEqual(0, len(tests_to_retry)) + + def testGetTestsToRetry_testFailed(self): + results = [ +@@ -95,7 +95,7 @@ class LocalDeviceTestRunTest(unittest.TestCase): + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) +- 
self.assertEquals(1, len(tests_to_retry)) ++ self.assertEqual(1, len(tests_to_retry)) + self.assertIn('Test1', tests_to_retry) + + def testGetTestsToRetry_testUnknown(self): +@@ -110,7 +110,7 @@ class LocalDeviceTestRunTest(unittest.TestCase): + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) +- self.assertEquals(1, len(tests_to_retry)) ++ self.assertEqual(1, len(tests_to_retry)) + self.assertIn('Test1', tests_to_retry) + + def testGetTestsToRetry_wildcardFilter_allPass(self): +@@ -127,7 +127,7 @@ class LocalDeviceTestRunTest(unittest.TestCase): + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) +- self.assertEquals(0, len(tests_to_retry)) ++ self.assertEqual(0, len(tests_to_retry)) + + def testGetTestsToRetry_wildcardFilter_oneFails(self): + results = [ +@@ -143,7 +143,7 @@ class LocalDeviceTestRunTest(unittest.TestCase): + + test_run = TestLocalDeviceTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) +- self.assertEquals(1, len(tests_to_retry)) ++ self.assertEqual(1, len(tests_to_retry)) + self.assertIn('TestCase.*', tests_to_retry) + + def testGetTestsToRetry_nonStringTests(self): +@@ -163,9 +163,9 @@ class LocalDeviceTestRunTest(unittest.TestCase): + + test_run = TestLocalDeviceNonStringTestRun() + tests_to_retry = test_run._GetTestsToRetry(tests, try_results) +- self.assertEquals(1, len(tests_to_retry)) ++ self.assertEqual(1, len(tests_to_retry)) + self.assertIsInstance(tests_to_retry[0], dict) +- self.assertEquals(tests[1], tests_to_retry[0]) ++ self.assertEqual(tests[1], tests_to_retry[0]) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/build/android/pylib/local/emulator/avd.py b/src/3rdparty/chromium/build/android/pylib/local/emulator/avd.py +index 881bff3a2..3a21fd096 100644 +--- a/src/3rdparty/chromium/build/android/pylib/local/emulator/avd.py ++++ 
b/src/3rdparty/chromium/build/android/pylib/local/emulator/avd.py +@@ -374,7 +374,7 @@ class AvdConfig(object): + pkgs_by_dir[pkg.dest_path] = [] + pkgs_by_dir[pkg.dest_path].append(pkg) + +- for pkg_dir, pkgs in pkgs_by_dir.iteritems(): ++ for pkg_dir, pkgs in pkgs_by_dir.items(): + logging.info('Installing packages in %s', pkg_dir) + cipd_root = os.path.join(constants.DIR_SOURCE_ROOT, pkg_dir) + if not os.path.exists(cipd_root): +diff --git a/src/3rdparty/chromium/build/android/pylib/local/emulator/ini.py b/src/3rdparty/chromium/build/android/pylib/local/emulator/ini.py +index 45761884f..fe363c992 100644 +--- a/src/3rdparty/chromium/build/android/pylib/local/emulator/ini.py ++++ b/src/3rdparty/chromium/build/android/pylib/local/emulator/ini.py +@@ -27,7 +27,7 @@ def load(fp): + + def dumps(obj): + ret = '' +- for k, v in sorted(obj.iteritems()): ++ for k, v in sorted(obj.items()): + ret += '%s = %s\n' % (k, str(v)) + return ret + +diff --git a/src/3rdparty/chromium/build/android/pylib/local/local_test_server_spawner.py b/src/3rdparty/chromium/build/android/pylib/local/local_test_server_spawner.py +index 66733622e..af63e1011 100644 +--- a/src/3rdparty/chromium/build/android/pylib/local/local_test_server_spawner.py ++++ b/src/3rdparty/chromium/build/android/pylib/local/local_test_server_spawner.py +@@ -25,7 +25,7 @@ def _WaitUntil(predicate, max_attempts=5): + Whether the provided predicate was satisfied once (before the timeout). 
+ """ + sleep_time_sec = 0.025 +- for _ in xrange(1, max_attempts): ++ for _ in range(1, max_attempts): + if predicate(): + return True + time.sleep(sleep_time_sec) +diff --git a/src/3rdparty/chromium/build/android/pylib/output/local_output_manager.py b/src/3rdparty/chromium/build/android/pylib/output/local_output_manager.py +index 89becd7f7..d068aaf6b 100644 +--- a/src/3rdparty/chromium/build/android/pylib/output/local_output_manager.py ++++ b/src/3rdparty/chromium/build/android/pylib/output/local_output_manager.py +@@ -5,7 +5,7 @@ + import time + import os + import shutil +-import urllib ++import urllib.request, urllib.parse, urllib.error + + from pylib.base import output_manager + +@@ -37,7 +37,7 @@ class LocalArchivedFile(output_manager.ArchivedFile): + self._output_path = os.path.join(out_root, out_subdir, out_filename) + + def _Link(self): +- return 'file://%s' % urllib.quote(self._output_path) ++ return 'file://%s' % urllib.parse.quote(self._output_path) + + def _Archive(self): + if not os.path.exists(os.path.dirname(self._output_path)): +diff --git a/src/3rdparty/chromium/build/android/pylib/pexpect.py b/src/3rdparty/chromium/build/android/pylib/pexpect.py +index cf59fb0f6..508eae675 100644 +--- a/src/3rdparty/chromium/build/android/pylib/pexpect.py ++++ b/src/3rdparty/chromium/build/android/pylib/pexpect.py +@@ -1,7 +1,7 @@ + # Copyright (c) 2012 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+-from __future__ import absolute_import ++ + + import os + import sys +diff --git a/src/3rdparty/chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator.py b/src/3rdparty/chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator.py +index b2e542bd2..5cc9c31ce 100644 +--- a/src/3rdparty/chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator.py ++++ b/src/3rdparty/chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator.py +@@ -13,7 +13,7 @@ import logging + import mimetypes + import os + import time +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + + _log = logging.getLogger(__name__) + +@@ -44,7 +44,7 @@ def WriteJSON(json_object, file_path, callback=None): + def ConvertTrieToFlatPaths(trie, prefix=None): + """Flattens the trie of paths, prepending a prefix to each.""" + result = {} +- for name, data in trie.iteritems(): ++ for name, data in trie.items(): + if prefix: + name = prefix + '/' + name + +@@ -95,7 +95,7 @@ class TestResult(object): + """A simple class that represents a single test result.""" + + # Test modifier constants. 
+- (NONE, FAILS, FLAKY, DISABLED) = range(4) ++ (NONE, FAILS, FLAKY, DISABLED) = list(range(4)) + + def __init__(self, test, failed=False, elapsed_time=0): + self.test_name = test +@@ -195,7 +195,7 @@ class JSONResultsGeneratorBase(object): + self._results_directory = results_file_base_path + + self._test_results_map = test_results_map +- self._test_results = test_results_map.values() ++ self._test_results = list(test_results_map.values()) + + self._svn_repositories = svn_repositories + if not self._svn_repositories: +@@ -217,7 +217,7 @@ class JSONResultsGeneratorBase(object): + WriteJSON(json_object, file_path) + + def GenerateTimesMSFile(self): +- times = TestTimingsTrie(self._test_results_map.values()) ++ times = TestTimingsTrie(list(self._test_results_map.values())) + file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME) + WriteJSON(times, file_path) + +@@ -326,7 +326,7 @@ class JSONResultsGeneratorBase(object): + return self.__class__.NO_DATA_RESULT + + test_result = self._test_results_map[test_name] +- if test_result.modifier in self.MODIFIER_TO_CHAR.keys(): ++ if test_result.modifier in list(self.MODIFIER_TO_CHAR.keys()): + return self.MODIFIER_TO_CHAR[test_result.modifier] + + return self.__class__.PASS_RESULT +@@ -374,23 +374,23 @@ class JSONResultsGeneratorBase(object): + return {}, None + + results_file_url = (self.URL_FOR_TEST_LIST_JSON % +- (urllib2.quote(self._test_results_server), +- urllib2.quote(self._builder_name), ++ (urllib.parse.quote(self._test_results_server), ++ urllib.parse.quote(self._builder_name), + self.RESULTS_FILENAME, +- urllib2.quote(self._test_type), +- urllib2.quote(self._master_name))) ++ urllib.parse.quote(self._test_type), ++ urllib.parse.quote(self._master_name))) + + # pylint: disable=redefined-variable-type + try: + # FIXME: We should talk to the network via a Host object. 
+- results_file = urllib2.urlopen(results_file_url) ++ results_file = urllib.request.urlopen(results_file_url) + old_results = results_file.read() +- except urllib2.HTTPError as http_error: ++ except urllib.error.HTTPError as http_error: + # A non-4xx status code means the bot is hosed for some reason + # and we can't grab the results.json file off of it. + if http_error.code < 400 and http_error.code >= 500: + error = http_error +- except urllib2.URLError as url_error: ++ except urllib.error.URLError as url_error: + error = url_error + # pylint: enable=redefined-variable-type + +@@ -426,7 +426,7 @@ class JSONResultsGeneratorBase(object): + + # Create a test modifiers (FAILS, FLAKY etc) summary dictionary. + entry = {} +- for test_name in self._test_results_map.iterkeys(): ++ for test_name in self._test_results_map.keys(): + result_char = self._GetModifierChar(test_name) + entry[result_char] = entry.get(result_char, 0) + 1 + +@@ -543,7 +543,7 @@ class JSONResultsGeneratorBase(object): + + # version 3->4 + if archive_version == 3: +- for results in results_json.values(): ++ for results in list(results_json.values()): + self._ConvertTestsToTrie(results) + + results_json[self.VERSION_KEY] = self.VERSION +@@ -554,7 +554,7 @@ class JSONResultsGeneratorBase(object): + + test_results = results[self.TESTS] + test_results_trie = {} +- for test in test_results.iterkeys(): ++ for test in test_results.keys(): + single_test_result = test_results[test] + AddPathToTrie(test, single_test_result, test_results_trie) + +@@ -642,10 +642,10 @@ class _FileUploader(object): + end = start + self._timeout_seconds + while time.time() < end: + try: +- request = urllib2.Request(self._url, data, ++ request = urllib.request.Request(self._url, data, + {'Content-Type': content_type}) +- return urllib2.urlopen(request) +- except urllib2.HTTPError as e: ++ return urllib.request.urlopen(request) ++ except urllib.error.HTTPError as e: + _log.warn("Received HTTP status %s loading \"%s\". 
" + 'Retrying in 10 seconds...', e.code, e.filename) + time.sleep(10) +@@ -678,7 +678,7 @@ def _EncodeMultipartFormData(fields, files): + lines.append('--' + BOUNDARY) + lines.append('Content-Disposition: form-data; name="%s"' % key) + lines.append('') +- if isinstance(value, unicode): ++ if isinstance(value, str): + value = value.encode('utf-8') + lines.append(value) + +@@ -688,7 +688,7 @@ def _EncodeMultipartFormData(fields, files): + 'filename="%s"' % (key, filename)) + lines.append('Content-Type: %s' % _GetMIMEType(filename)) + lines.append('') +- if isinstance(value, unicode): ++ if isinstance(value, str): + value = value.encode('utf-8') + lines.append(value) + +diff --git a/src/3rdparty/chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py b/src/3rdparty/chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py +index d6aee057b..70c808c71 100644 +--- a/src/3rdparty/chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py ++++ b/src/3rdparty/chromium/build/android/pylib/results/flakiness_dashboard/json_results_generator_unittest.py +@@ -114,7 +114,7 @@ class JSONGeneratorTest(unittest.TestCase): + if tests_set or DISABLED_count: + fixable = {} + for fixable_items in buildinfo[JRG.FIXABLE]: +- for (result_type, count) in fixable_items.iteritems(): ++ for (result_type, count) in fixable_items.items(): + if result_type in fixable: + fixable[result_type] = fixable[result_type] + count + else: +@@ -138,7 +138,7 @@ class JSONGeneratorTest(unittest.TestCase): + + if failed_count_map: + tests = buildinfo[JRG.TESTS] +- for test_name in failed_count_map.iterkeys(): ++ for test_name in failed_count_map.keys(): + test = self._FindTestInTrie(test_name, tests) + + failed = 0 +diff --git a/src/3rdparty/chromium/build/android/pylib/results/json_results.py b/src/3rdparty/chromium/build/android/pylib/results/json_results.py +index 38ede80e5..1c8de865e 100644 +--- 
a/src/3rdparty/chromium/build/android/pylib/results/json_results.py ++++ b/src/3rdparty/chromium/build/android/pylib/results/json_results.py +@@ -92,14 +92,14 @@ def GenerateResultsDict(test_run_results, global_tags=None): + result_dict = { + 'status': r.GetType(), + 'elapsed_time_ms': r.GetDuration(), +- 'output_snippet': unicode(r.GetLog(), errors='replace'), ++ 'output_snippet': str(r.GetLog(), errors='replace'), + 'losless_snippet': True, + 'output_snippet_base64': '', + 'links': r.GetLinks(), + } + iteration_data[r.GetName()].append(result_dict) + +- all_tests = all_tests.union(set(iteration_data.iterkeys())) ++ all_tests = all_tests.union(set(iteration_data.keys())) + per_iteration_data.append(iteration_data) + + return { +@@ -213,7 +213,7 @@ def ParseResultsFromJson(json_results): + results_list = [] + testsuite_runs = json_results['per_iteration_data'] + for testsuite_run in testsuite_runs: +- for test, test_runs in testsuite_run.iteritems(): ++ for test, test_runs in testsuite_run.items(): + results_list.extend( + [base_test_result.BaseTestResult(test, + string_as_status(tr['status']), +diff --git a/src/3rdparty/chromium/build/android/pylib/results/json_results_test.py b/src/3rdparty/chromium/build/android/pylib/results/json_results_test.py +index 2c1a42202..6eb0c223d 100755 +--- a/src/3rdparty/chromium/build/android/pylib/results/json_results_test.py ++++ b/src/3rdparty/chromium/build/android/pylib/results/json_results_test.py +@@ -19,18 +19,18 @@ class JsonResultsTest(unittest.TestCase): + all_results.AddResult(result) + + results_dict = json_results.GenerateResultsDict([all_results]) +- self.assertEquals( ++ self.assertEqual( + ['test.package.TestName'], + results_dict['all_tests']) +- self.assertEquals(1, len(results_dict['per_iteration_data'])) ++ self.assertEqual(1, len(results_dict['per_iteration_data'])) + + iteration_result = results_dict['per_iteration_data'][0] + self.assertTrue('test.package.TestName' in iteration_result) +- 
self.assertEquals(1, len(iteration_result['test.package.TestName'])) ++ self.assertEqual(1, len(iteration_result['test.package.TestName'])) + + test_iteration_result = iteration_result['test.package.TestName'][0] + self.assertTrue('status' in test_iteration_result) +- self.assertEquals('SUCCESS', test_iteration_result['status']) ++ self.assertEqual('SUCCESS', test_iteration_result['status']) + + def testGenerateResultsDict_skippedResult(self): + result = base_test_result.BaseTestResult( +@@ -40,18 +40,18 @@ class JsonResultsTest(unittest.TestCase): + all_results.AddResult(result) + + results_dict = json_results.GenerateResultsDict([all_results]) +- self.assertEquals( ++ self.assertEqual( + ['test.package.TestName'], + results_dict['all_tests']) +- self.assertEquals(1, len(results_dict['per_iteration_data'])) ++ self.assertEqual(1, len(results_dict['per_iteration_data'])) + + iteration_result = results_dict['per_iteration_data'][0] + self.assertTrue('test.package.TestName' in iteration_result) +- self.assertEquals(1, len(iteration_result['test.package.TestName'])) ++ self.assertEqual(1, len(iteration_result['test.package.TestName'])) + + test_iteration_result = iteration_result['test.package.TestName'][0] + self.assertTrue('status' in test_iteration_result) +- self.assertEquals('SKIPPED', test_iteration_result['status']) ++ self.assertEqual('SKIPPED', test_iteration_result['status']) + + def testGenerateResultsDict_failedResult(self): + result = base_test_result.BaseTestResult( +@@ -61,18 +61,18 @@ class JsonResultsTest(unittest.TestCase): + all_results.AddResult(result) + + results_dict = json_results.GenerateResultsDict([all_results]) +- self.assertEquals( ++ self.assertEqual( + ['test.package.TestName'], + results_dict['all_tests']) +- self.assertEquals(1, len(results_dict['per_iteration_data'])) ++ self.assertEqual(1, len(results_dict['per_iteration_data'])) + + iteration_result = results_dict['per_iteration_data'][0] + self.assertTrue('test.package.TestName' in 
iteration_result) +- self.assertEquals(1, len(iteration_result['test.package.TestName'])) ++ self.assertEqual(1, len(iteration_result['test.package.TestName'])) + + test_iteration_result = iteration_result['test.package.TestName'][0] + self.assertTrue('status' in test_iteration_result) +- self.assertEquals('FAILURE', test_iteration_result['status']) ++ self.assertEqual('FAILURE', test_iteration_result['status']) + + def testGenerateResultsDict_duration(self): + result = base_test_result.BaseTestResult( +@@ -82,18 +82,18 @@ class JsonResultsTest(unittest.TestCase): + all_results.AddResult(result) + + results_dict = json_results.GenerateResultsDict([all_results]) +- self.assertEquals( ++ self.assertEqual( + ['test.package.TestName'], + results_dict['all_tests']) +- self.assertEquals(1, len(results_dict['per_iteration_data'])) ++ self.assertEqual(1, len(results_dict['per_iteration_data'])) + + iteration_result = results_dict['per_iteration_data'][0] + self.assertTrue('test.package.TestName' in iteration_result) +- self.assertEquals(1, len(iteration_result['test.package.TestName'])) ++ self.assertEqual(1, len(iteration_result['test.package.TestName'])) + + test_iteration_result = iteration_result['test.package.TestName'][0] + self.assertTrue('elapsed_time_ms' in test_iteration_result) +- self.assertEquals(123, test_iteration_result['elapsed_time_ms']) ++ self.assertEqual(123, test_iteration_result['elapsed_time_ms']) + + def testGenerateResultsDict_multipleResults(self): + result1 = base_test_result.BaseTestResult( +@@ -106,27 +106,27 @@ class JsonResultsTest(unittest.TestCase): + all_results.AddResult(result2) + + results_dict = json_results.GenerateResultsDict([all_results]) +- self.assertEquals( ++ self.assertEqual( + ['test.package.TestName1', 'test.package.TestName2'], + results_dict['all_tests']) + + self.assertTrue('per_iteration_data' in results_dict) + iterations = results_dict['per_iteration_data'] +- self.assertEquals(1, len(iterations)) ++ 
self.assertEqual(1, len(iterations)) + + expected_tests = set([ + 'test.package.TestName1', + 'test.package.TestName2', + ]) + +- for test_name, iteration_result in iterations[0].iteritems(): ++ for test_name, iteration_result in iterations[0].items(): + self.assertTrue(test_name in expected_tests) + expected_tests.remove(test_name) +- self.assertEquals(1, len(iteration_result)) ++ self.assertEqual(1, len(iteration_result)) + + test_iteration_result = iteration_result[0] + self.assertTrue('status' in test_iteration_result) +- self.assertEquals('SUCCESS', test_iteration_result['status']) ++ self.assertEqual('SUCCESS', test_iteration_result['status']) + + def testGenerateResultsDict_passOnRetry(self): + raw_results = [] +@@ -144,28 +144,28 @@ class JsonResultsTest(unittest.TestCase): + raw_results.append(run_results2) + + results_dict = json_results.GenerateResultsDict([raw_results]) +- self.assertEquals(['test.package.TestName1'], results_dict['all_tests']) ++ self.assertEqual(['test.package.TestName1'], results_dict['all_tests']) + + # Check that there's only one iteration. + self.assertIn('per_iteration_data', results_dict) + iterations = results_dict['per_iteration_data'] +- self.assertEquals(1, len(iterations)) ++ self.assertEqual(1, len(iterations)) + + # Check that test.package.TestName1 is the only test in the iteration. +- self.assertEquals(1, len(iterations[0])) ++ self.assertEqual(1, len(iterations[0])) + self.assertIn('test.package.TestName1', iterations[0]) + + # Check that there are two results for test.package.TestName1. + actual_test_results = iterations[0]['test.package.TestName1'] +- self.assertEquals(2, len(actual_test_results)) ++ self.assertEqual(2, len(actual_test_results)) + + # Check that the first result is a failure. 
+ self.assertIn('status', actual_test_results[0]) +- self.assertEquals('FAILURE', actual_test_results[0]['status']) ++ self.assertEqual('FAILURE', actual_test_results[0]['status']) + + # Check that the second result is a success. + self.assertIn('status', actual_test_results[1]) +- self.assertEquals('SUCCESS', actual_test_results[1]['status']) ++ self.assertEqual('SUCCESS', actual_test_results[1]['status']) + + def testGenerateResultsDict_globalTags(self): + raw_results = [] +@@ -173,7 +173,7 @@ class JsonResultsTest(unittest.TestCase): + + results_dict = json_results.GenerateResultsDict( + [raw_results], global_tags=global_tags) +- self.assertEquals(['UNRELIABLE_RESULTS'], results_dict['global_tags']) ++ self.assertEqual(['UNRELIABLE_RESULTS'], results_dict['global_tags']) + + def testGenerateResultsDict_loslessSnippet(self): + result = base_test_result.BaseTestResult( +@@ -185,22 +185,22 @@ class JsonResultsTest(unittest.TestCase): + all_results.AddResult(result) + + results_dict = json_results.GenerateResultsDict([all_results]) +- self.assertEquals( ++ self.assertEqual( + ['test.package.TestName'], + results_dict['all_tests']) +- self.assertEquals(1, len(results_dict['per_iteration_data'])) ++ self.assertEqual(1, len(results_dict['per_iteration_data'])) + + iteration_result = results_dict['per_iteration_data'][0] + self.assertTrue('test.package.TestName' in iteration_result) +- self.assertEquals(1, len(iteration_result['test.package.TestName'])) ++ self.assertEqual(1, len(iteration_result['test.package.TestName'])) + + test_iteration_result = iteration_result['test.package.TestName'][0] + self.assertTrue('losless_snippet' in test_iteration_result) + self.assertTrue(test_iteration_result['losless_snippet']) + self.assertTrue('output_snippet' in test_iteration_result) +- self.assertEquals(log, test_iteration_result['output_snippet']) ++ self.assertEqual(log, test_iteration_result['output_snippet']) + self.assertTrue('output_snippet_base64' in 
test_iteration_result) +- self.assertEquals('', test_iteration_result['output_snippet_base64']) ++ self.assertEqual('', test_iteration_result['output_snippet_base64']) + + def testGenerateJsonTestResultFormatDict_passedResult(self): + result = base_test_result.BaseTestResult('test.package.TestName', +@@ -210,13 +210,13 @@ class JsonResultsTest(unittest.TestCase): + all_results.AddResult(result) + + results_dict = json_results.GenerateJsonTestResultFormatDict([all_results]) +- self.assertEquals(1, len(results_dict['tests'])) +- self.assertEquals(1, len(results_dict['tests']['test'])) +- self.assertEquals(1, len(results_dict['tests']['test']['package'])) +- self.assertEquals( ++ self.assertEqual(1, len(results_dict['tests'])) ++ self.assertEqual(1, len(results_dict['tests']['test'])) ++ self.assertEqual(1, len(results_dict['tests']['test']['package'])) ++ self.assertEqual( + 'PASS', + results_dict['tests']['test']['package']['TestName']['expected']) +- self.assertEquals( ++ self.assertEqual( + 'PASS', results_dict['tests']['test']['package']['TestName']['actual']) + + def testGenerateJsonTestResultFormatDict_failedResult(self): +@@ -227,13 +227,13 @@ class JsonResultsTest(unittest.TestCase): + all_results.AddResult(result) + + results_dict = json_results.GenerateJsonTestResultFormatDict([all_results]) +- self.assertEquals(1, len(results_dict['tests'])) +- self.assertEquals(1, len(results_dict['tests']['test'])) +- self.assertEquals(1, len(results_dict['tests']['test']['package'])) +- self.assertEquals( ++ self.assertEqual(1, len(results_dict['tests'])) ++ self.assertEqual(1, len(results_dict['tests']['test'])) ++ self.assertEqual(1, len(results_dict['tests']['test']['package'])) ++ self.assertEqual( + 'PASS', + results_dict['tests']['test']['package']['TestName']['expected']) +- self.assertEquals( ++ self.assertEqual( + 'FAIL', results_dict['tests']['test']['package']['TestName']['actual']) + + +diff --git 
a/src/3rdparty/chromium/build/android/pylib/results/presentation/standard_gtest_merge.py b/src/3rdparty/chromium/build/android/pylib/results/presentation/standard_gtest_merge.py +index 58a29366c..941c2f574 100755 +--- a/src/3rdparty/chromium/build/android/pylib/results/presentation/standard_gtest_merge.py ++++ b/src/3rdparty/chromium/build/android/pylib/results/presentation/standard_gtest_merge.py +@@ -4,7 +4,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import argparse + import json +@@ -43,17 +43,17 @@ def merge_shard_results(summary_json, jsons_to_merge): + # client/swarming.py, which means the state enum is saved in its string + # name form, not in the number form. + state = result.get('state') +- if state == u'BOT_DIED': ++ if state == 'BOT_DIED': + print( + 'Shard #%d had a Swarming internal failure' % index, file=sys.stderr) +- elif state == u'EXPIRED': ++ elif state == 'EXPIRED': + print('There wasn\'t enough capacity to run your test', file=sys.stderr) +- elif state == u'TIMED_OUT': ++ elif state == 'TIMED_OUT': + print('Test runtime exceeded allocated time' + 'Either it ran for too long (hard timeout) or it didn\'t produce ' + 'I/O for an extended period of time (I/O timeout)', + file=sys.stderr) +- elif state != u'COMPLETED': ++ elif state != 'COMPLETED': + print('Invalid Swarming task state: %s' % state, file=sys.stderr) + + json_data, err_msg = load_shard_json(index, result.get('task_id'), +@@ -138,7 +138,7 @@ def load_shard_json(index, task_id, jsons_to_merge): + def merge_list_of_dicts(left, right): + """Merges dicts left[0] with right[0], left[1] with right[1], etc.""" + output = [] +- for i in xrange(max(len(left), len(right))): ++ for i in range(max(len(left), len(right))): + left_dict = left[i] if i < len(left) else {} + right_dict = right[i] if i < len(right) else {} + merged_dict = left_dict.copy() +diff --git 
a/src/3rdparty/chromium/build/android/pylib/results/presentation/test_results_presentation.py b/src/3rdparty/chromium/build/android/pylib/results/presentation/test_results_presentation.py +index 128eb04e3..6545c806f 100755 +--- a/src/3rdparty/chromium/build/android/pylib/results/presentation/test_results_presentation.py ++++ b/src/3rdparty/chromium/build/android/pylib/results/presentation/test_results_presentation.py +@@ -4,7 +4,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import argparse + import collections +@@ -14,7 +14,7 @@ import logging + import tempfile + import os + import sys +-import urllib ++import urllib.request, urllib.parse, urllib.error + + + CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) +@@ -104,7 +104,7 @@ def action_cell(action, data, html_class): + + + def flakiness_dashbord_link(test_name, suite_name): +- url_args = urllib.urlencode([ ++ url_args = urllib.parse.urlencode([ + ('testType', suite_name), + ('tests', test_name)]) + return ('https://test-results.appspot.com/' +@@ -156,7 +156,7 @@ def create_test_table(results_dict, cs_base_url, suite_name): + ] + + test_row_blocks = [] +- for test_name, test_results in results_dict.iteritems(): ++ for test_name, test_results in results_dict.items(): + test_runs = [] + for index, result in enumerate(test_results): + if index == 0: +@@ -215,7 +215,7 @@ def create_suite_table(results_dict): + ] + + suite_row_dict = {} +- for test_name, test_results in results_dict.iteritems(): ++ for test_name, test_results in results_dict.items(): + # TODO(mikecase): This logic doesn't work if there are multiple test runs. + # That is, if 'per_iteration_data' has multiple entries. + # Since we only care about the result of the last test run. 
+@@ -253,7 +253,7 @@ def create_suite_table(results_dict): + suite_row[TIME_INDEX]['data'] += result['elapsed_time_ms'] + footer_row[TIME_INDEX]['data'] += result['elapsed_time_ms'] + +- for suite in suite_row_dict.values(): ++ for suite in list(suite_row_dict.values()): + if suite[FAIL_COUNT_INDEX]['data'] > 0: + suite[FAIL_COUNT_INDEX]['class'] += ' failure' + else: +@@ -265,7 +265,7 @@ def create_suite_table(results_dict): + footer_row[FAIL_COUNT_INDEX]['class'] += ' success' + + return (header_row, +- [[suite_row] for suite_row in suite_row_dict.values()], ++ [[suite_row] for suite_row in list(suite_row_dict.values())], + footer_row) + + +@@ -278,7 +278,7 @@ def feedback_url(result_details_link): + ] + if result_details_link: + url_args.append(('comment', 'Please check out: %s' % result_details_link)) +- url_args = urllib.urlencode(url_args) ++ url_args = urllib.parse.urlencode(url_args) + # pylint: enable=redefined-variable-type + return 'https://bugs.chromium.org/p/chromium/issues/entry?%s' % url_args + +@@ -349,7 +349,7 @@ def result_details(json_path, test_name, cs_base_url, bucket=None, + + results_dict = collections.defaultdict(list) + for testsuite_run in json_object['per_iteration_data']: +- for test, test_runs in testsuite_run.iteritems(): ++ for test, test_runs in testsuite_run.items(): + results_dict[test].extend(test_runs) + return results_to_html(results_dict, cs_base_url, bucket, test_name, + builder_name, build_number, local_output) +@@ -376,12 +376,12 @@ def ui_screenshot_set(json_path): + ui_screenshots = [] + # pylint: disable=too-many-nested-blocks + for testsuite_run in json_object['per_iteration_data']: +- for _, test_runs in testsuite_run.iteritems(): ++ for _, test_runs in testsuite_run.items(): + for test_run in test_runs: + if 'ui screenshot' in test_run['links']: + screenshot_link = test_run['links']['ui screenshot'] + if screenshot_link.startswith('file:'): +- with contextlib.closing(urllib.urlopen(screenshot_link)) as f: ++ with 
contextlib.closing(urllib.request.urlopen(screenshot_link)) as f: + test_screenshots = json.load(f) + else: + # Assume anything that isn't a file link is a google storage link +@@ -518,7 +518,7 @@ def main(): + + if ui_screenshot_set_link: + ui_catalog_url = 'https://chrome-ui-catalog.appspot.com/' +- ui_catalog_query = urllib.urlencode( ++ ui_catalog_query = urllib.parse.urlencode( + {'screenshot_source': ui_screenshot_set_link}) + ui_screenshot_link = '%s?%s' % (ui_catalog_url, ui_catalog_query) + +diff --git a/src/3rdparty/chromium/build/android/pylib/results/report_results.py b/src/3rdparty/chromium/build/android/pylib/results/report_results.py +index 56eefac46..60307d2c2 100644 +--- a/src/3rdparty/chromium/build/android/pylib/results/report_results.py ++++ b/src/3rdparty/chromium/build/android/pylib/results/report_results.py +@@ -4,7 +4,7 @@ + + """Module containing utility functions for reporting results.""" + +-from __future__ import print_function ++ + + import logging + import os +diff --git a/src/3rdparty/chromium/build/android/pylib/symbols/apk_lib_dump.py b/src/3rdparty/chromium/build/android/pylib/symbols/apk_lib_dump.py +index ba8702669..933a0aba1 100755 +--- a/src/3rdparty/chromium/build/android/pylib/symbols/apk_lib_dump.py ++++ b/src/3rdparty/chromium/build/android/pylib/symbols/apk_lib_dump.py +@@ -22,7 +22,7 @@ easy to use in a Python script, e.g. 
with a line like: + (0x, 0x, 0x, ), + """ + +-from __future__ import print_function ++ + + import argparse + import os +diff --git a/src/3rdparty/chromium/build/android/pylib/symbols/apk_native_libs_unittest.py b/src/3rdparty/chromium/build/android/pylib/symbols/apk_native_libs_unittest.py +index 416918d8a..b2e47ccb7 100644 +--- a/src/3rdparty/chromium/build/android/pylib/symbols/apk_native_libs_unittest.py ++++ b/src/3rdparty/chromium/build/android/pylib/symbols/apk_native_libs_unittest.py +@@ -130,12 +130,12 @@ class MockApkReaderTest(unittest.TestCase): + 'foo.txt': (1024, 1024, 'FooFooFoo'), + 'lib/bar/libcode.so': (16000, 3240, 1024, '\x7fELFFFFFFFFFFFF'), + } +- for path, props in _ENTRIES.iteritems(): ++ for path, props in _ENTRIES.items(): + reader.AddTestEntry(path, props[0], props[1], props[2]) + + entries = reader.ListEntries() + self.assertEqual(len(entries), len(_ENTRIES)) +- for path, props in _ENTRIES.iteritems(): ++ for path, props in _ENTRIES.items(): + entry = reader.FindEntry(path) + self.assertEqual(entry.filename, path) + self.assertEqual(entry.file_size, props[0]) +diff --git a/src/3rdparty/chromium/build/android/pylib/symbols/deobfuscator.py b/src/3rdparty/chromium/build/android/pylib/symbols/deobfuscator.py +index ffc23b870..aca4c3a9e 100644 +--- a/src/3rdparty/chromium/build/android/pylib/symbols/deobfuscator.py ++++ b/src/3rdparty/chromium/build/android/pylib/symbols/deobfuscator.py +@@ -139,7 +139,7 @@ class DeobfuscatorPool(object): + # out/Release/apks/ChromePublic.apk.mapping + def __init__(self, mapping_path, pool_size=4): + self._mapping_path = mapping_path +- self._pool = [Deobfuscator(mapping_path) for _ in xrange(pool_size)] ++ self._pool = [Deobfuscator(mapping_path) for _ in range(pool_size)] + # Allow only one thread to select from the pool at a time. 
+ self._lock = threading.Lock() + self._num_restarts = 0 +diff --git a/src/3rdparty/chromium/build/android/pylib/symbols/elf_symbolizer.py b/src/3rdparty/chromium/build/android/pylib/symbols/elf_symbolizer.py +index 1f2f91825..2c4bfacbf 100644 +--- a/src/3rdparty/chromium/build/android/pylib/symbols/elf_symbolizer.py ++++ b/src/3rdparty/chromium/build/android/pylib/symbols/elf_symbolizer.py +@@ -8,7 +8,7 @@ import logging + import multiprocessing + import os + import posixpath +-import Queue ++import queue + import re + import subprocess + import sys +@@ -293,7 +293,7 @@ class ELFSymbolizer(object): + + try: + lines = self._out_queue.get(block=True, timeout=0.25) +- except Queue.Empty: ++ except queue.Empty: + # On timeout (1/4 s.) repeat the inner loop and check if either the + # addr2line process did crash or we waited its output for too long. + continue +@@ -314,7 +314,7 @@ class ELFSymbolizer(object): + while True: + try: + lines = self._out_queue.get_nowait() +- except Queue.Empty: ++ except queue.Empty: + break + self._ProcessSymbolOutput(lines) + +@@ -405,7 +405,7 @@ class ELFSymbolizer(object): + # The only reason of existence of this Queue (and the corresponding + # Thread below) is the lack of a subprocess.stdout.poll_avail_lines(). + # Essentially this is a pipe able to extract a couple of lines atomically. +- self._out_queue = Queue.Queue() ++ self._out_queue = queue.Queue() + + # Start the underlying addr2line process in line buffered mode. 
+ +diff --git a/src/3rdparty/chromium/build/android/pylib/symbols/elf_symbolizer_unittest.py b/src/3rdparty/chromium/build/android/pylib/symbols/elf_symbolizer_unittest.py +index 765b5989c..37e16670f 100755 +--- a/src/3rdparty/chromium/build/android/pylib/symbols/elf_symbolizer_unittest.py ++++ b/src/3rdparty/chromium/build/android/pylib/symbols/elf_symbolizer_unittest.py +@@ -55,7 +55,7 @@ class ELFSymbolizerTest(unittest.TestCase): + inlines=True, + max_concurrent_jobs=4) + +- for addr in xrange(1000): ++ for addr in range(1000): + exp_inline = False + exp_unknown = False + +@@ -150,7 +150,7 @@ class ELFSymbolizerTest(unittest.TestCase): + max_concurrent_jobs=max_concurrent_jobs, + addr2line_timeout=0.5) + +- for addr in xrange(num_symbols): ++ for addr in range(num_symbols): + exp_name = 'mock_sym_for_addr_%d' % addr + exp_source_path = 'mock_src/mock_lib1.so.c' + exp_source_line = addr +@@ -160,7 +160,7 @@ class ELFSymbolizerTest(unittest.TestCase): + symbolizer.Join() + + # Check that all the expected callbacks have been received. +- for addr in xrange(num_symbols): ++ for addr in range(num_symbols): + self.assertIn(addr, self._resolved_addresses) + self._resolved_addresses.remove(addr) + +diff --git a/src/3rdparty/chromium/build/android/pylib/symbols/symbol_utils.py b/src/3rdparty/chromium/build/android/pylib/symbols/symbol_utils.py +index dea3c63ca..9e18fde77 100644 +--- a/src/3rdparty/chromium/build/android/pylib/symbols/symbol_utils.py ++++ b/src/3rdparty/chromium/build/android/pylib/symbols/symbol_utils.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import print_function ++ + + import bisect + import collections +@@ -346,7 +346,7 @@ class DeviceSymbolResolver(SymbolResolver): + offset) + libraries_map[lib_path].add(lib_offset) + +- for lib_path, lib_offsets in libraries_map.iteritems(): ++ for lib_path, lib_offsets in libraries_map.items(): + self.AddLibraryOffsets(lib_path, lib_offsets) + + def FindSymbolInfo(self, device_path, offset): +diff --git a/src/3rdparty/chromium/build/android/pylib/symbols/symbol_utils_unittest.py b/src/3rdparty/chromium/build/android/pylib/symbols/symbol_utils_unittest.py +index ed87f9ed7..c186d6364 100644 +--- a/src/3rdparty/chromium/build/android/pylib/symbols/symbol_utils_unittest.py ++++ b/src/3rdparty/chromium/build/android/pylib/symbols/symbol_utils_unittest.py +@@ -772,7 +772,7 @@ class ElfSymbolResolverTest(unittest.TestCase): + addr2line_path_for_tests=_MOCK_A2L_PATH) + resolver.SetAndroidAbi('ignored-abi') + +- for addr, expected_sym in _TEST_SYMBOL_DATA.iteritems(): ++ for addr, expected_sym in _TEST_SYMBOL_DATA.items(): + self.assertEqual(resolver.FindSymbolInfo('/some/path/libmock1.so', addr), + expected_sym) + +@@ -781,11 +781,11 @@ class ElfSymbolResolverTest(unittest.TestCase): + addr2line_path_for_tests=_MOCK_A2L_PATH) + resolver.SetAndroidAbi('ignored-abi') + resolver.AddLibraryOffsets('/some/path/libmock1.so', +- _TEST_SYMBOL_DATA.keys()) ++ list(_TEST_SYMBOL_DATA.keys())) + + resolver.DisallowSymbolizerForTesting() + +- for addr, expected_sym in _TEST_SYMBOL_DATA.iteritems(): ++ for addr, expected_sym in _TEST_SYMBOL_DATA.items(): + sym_info = resolver.FindSymbolInfo('/some/path/libmock1.so', addr) + self.assertIsNotNone(sym_info, 'None symbol info for addr %x' % addr) + self.assertEqual( +@@ -915,7 +915,7 @@ class BacktraceTranslatorTest(unittest.TestCase): + input_backtrace = _EXPECTED_BACKTRACE.splitlines() + expected_lib_offsets_map = _EXPECTED_BACKTRACE_OFFSETS_MAP + offset_map = backtrace_translator.FindLibraryOffsets(input_backtrace) 
+- for lib_path, offsets in offset_map.iteritems(): ++ for lib_path, offsets in offset_map.items(): + self.assertTrue(lib_path in expected_lib_offsets_map, + '%s is not in expected library-offsets map!' % lib_path) + sorted_offsets = sorted(offsets) +diff --git a/src/3rdparty/chromium/build/android/pylib/utils/argparse_utils.py b/src/3rdparty/chromium/build/android/pylib/utils/argparse_utils.py +index 06544a2b0..bd603c9d5 100644 +--- a/src/3rdparty/chromium/build/android/pylib/utils/argparse_utils.py ++++ b/src/3rdparty/chromium/build/android/pylib/utils/argparse_utils.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import argparse + +diff --git a/src/3rdparty/chromium/build/android/pylib/utils/chrome_proxy_utils_test.py b/src/3rdparty/chromium/build/android/pylib/utils/chrome_proxy_utils_test.py +index b38b268fe..8447fa8fc 100755 +--- a/src/3rdparty/chromium/build/android/pylib/utils/chrome_proxy_utils_test.py ++++ b/src/3rdparty/chromium/build/android/pylib/utils/chrome_proxy_utils_test.py +@@ -91,7 +91,7 @@ class ChromeProxySessionTest(unittest.TestCase): + wpr_mock.assert_called_once_with() + ts_proxy_mock.assert_called_once_with() + self.assertFalse(chrome_proxy.wpr_replay_mode) +- self.assertEquals(chrome_proxy.wpr_archive_path, os.path.abspath(__file__)) ++ self.assertEqual(chrome_proxy.wpr_archive_path, os.path.abspath(__file__)) + + def test_SetWPRRecordMode(self): + chrome_proxy = chrome_proxy_utils.ChromeProxySession(4) +@@ -108,7 +108,7 @@ class ChromeProxySessionTest(unittest.TestCase): + def test_SetWPRArchivePath(self): + chrome_proxy = chrome_proxy_utils.ChromeProxySession(4) + chrome_proxy._wpr_server._archive_path = 'abc' +- self.assertEquals(chrome_proxy.wpr_archive_path, 'abc') ++ self.assertEqual(chrome_proxy.wpr_archive_path, 'abc') + + def test_UseDefaultDeviceProxyPort(self): + chrome_proxy = 
chrome_proxy_utils.ChromeProxySession() +@@ -117,7 +117,7 @@ class ChromeProxySessionTest(unittest.TestCase): + 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=', + '--proxy-server=socks5://localhost:1080' + ] +- self.assertEquals(chrome_proxy.device_proxy_port, 1080) ++ self.assertEqual(chrome_proxy.device_proxy_port, 1080) + self.assertListEqual(chrome_proxy.GetFlags(), expected_flags) + + def test_UseNewDeviceProxyPort(self): +@@ -127,7 +127,7 @@ class ChromeProxySessionTest(unittest.TestCase): + 'PhrPvGIaAMmd29hj8BCZOq096yj7uMpRNHpn5PDxI6I=', + '--proxy-server=socks5://localhost:1' + ] +- self.assertEquals(chrome_proxy.device_proxy_port, 1) ++ self.assertEqual(chrome_proxy.device_proxy_port, 1) + self.assertListEqual(chrome_proxy.GetFlags(), expected_flags) + + +diff --git a/src/3rdparty/chromium/build/android/pylib/utils/decorators_test.py b/src/3rdparty/chromium/build/android/pylib/utils/decorators_test.py +index 60f4811b4..64372a858 100755 +--- a/src/3rdparty/chromium/build/android/pylib/utils/decorators_test.py ++++ b/src/3rdparty/chromium/build/android/pylib/utils/decorators_test.py +@@ -35,8 +35,8 @@ class NoRaiseExceptionDecoratorTest(unittest.TestCase): + def doesNotRaiseException(): + return 999 + +- self.assertEquals(raiseException(), 111) +- self.assertEquals(doesNotRaiseException(), 999) ++ self.assertEqual(raiseException(), 111) ++ self.assertEqual(doesNotRaiseException(), 999) + + + class MemoizeDecoratorTest(unittest.TestCase): +@@ -79,13 +79,13 @@ class MemoizeDecoratorTest(unittest.TestCase): + return notMemoized.count + notMemoized.count = 0 + +- self.assertEquals(memoized(), 1) +- self.assertEquals(memoized(), 1) +- self.assertEquals(memoized(), 1) ++ self.assertEqual(memoized(), 1) ++ self.assertEqual(memoized(), 1) ++ self.assertEqual(memoized(), 1) + +- self.assertEquals(notMemoized(), 1) +- self.assertEquals(notMemoized(), 2) +- self.assertEquals(notMemoized(), 3) ++ self.assertEqual(notMemoized(), 1) ++ self.assertEqual(notMemoized(), 2) ++ 
self.assertEqual(notMemoized(), 3) + + def testFunctionMemoizedBasedOnArgs(self): + """Tests that |Memoize| caches results based on args and kwargs.""" +@@ -94,10 +94,10 @@ class MemoizeDecoratorTest(unittest.TestCase): + def returnValueBasedOnArgsKwargs(a, k=0): + return a + k + +- self.assertEquals(returnValueBasedOnArgsKwargs(1, 1), 2) +- self.assertEquals(returnValueBasedOnArgsKwargs(1, 2), 3) +- self.assertEquals(returnValueBasedOnArgsKwargs(2, 1), 3) +- self.assertEquals(returnValueBasedOnArgsKwargs(3, 3), 6) ++ self.assertEqual(returnValueBasedOnArgsKwargs(1, 1), 2) ++ self.assertEqual(returnValueBasedOnArgsKwargs(1, 2), 3) ++ self.assertEqual(returnValueBasedOnArgsKwargs(2, 1), 3) ++ self.assertEqual(returnValueBasedOnArgsKwargs(3, 3), 6) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/build/android/pylib/utils/device_dependencies_test.py b/src/3rdparty/chromium/build/android/pylib/utils/device_dependencies_test.py +index aaa9ebf68..55084faaa 100755 +--- a/src/3rdparty/chromium/build/android/pylib/utils/device_dependencies_test.py ++++ b/src/3rdparty/chromium/build/android/pylib/utils/device_dependencies_test.py +@@ -16,7 +16,7 @@ class DevicePathComponentsForTest(unittest.TestCase): + test_path = os.path.join(constants.DIR_SOURCE_ROOT, 'foo', 'bar', 'baz.txt') + output_directory = os.path.join( + constants.DIR_SOURCE_ROOT, 'out-foo', 'Release') +- self.assertEquals( ++ self.assertEqual( + [None, 'foo', 'bar', 'baz.txt'], + device_dependencies.DevicePathComponentsFor( + test_path, output_directory)) +@@ -26,7 +26,7 @@ class DevicePathComponentsForTest(unittest.TestCase): + 'icudtl.dat') + output_directory = os.path.join( + constants.DIR_SOURCE_ROOT, 'out-foo', 'Release') +- self.assertEquals( ++ self.assertEqual( + [None, 'icudtl.dat'], + device_dependencies.DevicePathComponentsFor( + test_path, output_directory)) +@@ -36,7 +36,7 @@ class DevicePathComponentsForTest(unittest.TestCase): + 'test_dir', 'icudtl.dat') + output_directory = 
os.path.join( + constants.DIR_SOURCE_ROOT, 'out-foo', 'Release') +- self.assertEquals( ++ self.assertEqual( + [None, 'test_dir', 'icudtl.dat'], + device_dependencies.DevicePathComponentsFor( + test_path, output_directory)) +@@ -46,7 +46,7 @@ class DevicePathComponentsForTest(unittest.TestCase): + 'foo.pak') + output_directory = os.path.join( + constants.DIR_SOURCE_ROOT, 'out-foo', 'Release') +- self.assertEquals( ++ self.assertEqual( + [None, 'paks', 'foo.pak'], + device_dependencies.DevicePathComponentsFor( + test_path, output_directory)) +diff --git a/src/3rdparty/chromium/build/android/pylib/utils/dexdump.py b/src/3rdparty/chromium/build/android/pylib/utils/dexdump.py +index d0dfee45a..044ce33c5 100644 +--- a/src/3rdparty/chromium/build/android/pylib/utils/dexdump.py ++++ b/src/3rdparty/chromium/build/android/pylib/utils/dexdump.py +@@ -48,10 +48,10 @@ def Dump(apk_path): + # re-encode it (as etree expects a byte string as input so it can figure + # out the encoding itself from the XML declaration) + BAD_XML_CHARS = re.compile( +- u'[\x00-\x08\x0b-\x0c\x0e-\x1f\x7f-\x84\x86-\x9f' + +- u'\ud800-\udfff\ufdd0-\ufddf\ufffe-\uffff]') ++ '[\x00-\x08\x0b-\x0c\x0e-\x1f\x7f-\x84\x86-\x9f' + ++ '\ud800-\udfff\ufdd0-\ufddf\ufffe-\uffff]') + decoded_xml = output_xml.decode('utf-8', 'replace') +- clean_xml = BAD_XML_CHARS.sub(u'\ufffd', decoded_xml) ++ clean_xml = BAD_XML_CHARS.sub('\ufffd', decoded_xml) + parsed_dex_files.append( + _ParseRootNode(ElementTree.fromstring(clean_xml.encode('utf-8')))) + return parsed_dex_files +diff --git a/src/3rdparty/chromium/build/android/pylib/utils/dexdump_test.py b/src/3rdparty/chromium/build/android/pylib/utils/dexdump_test.py +index 6b2c4542f..36e70185b 100755 +--- a/src/3rdparty/chromium/build/android/pylib/utils/dexdump_test.py ++++ b/src/3rdparty/chromium/build/android/pylib/utils/dexdump_test.py +@@ -89,7 +89,7 @@ class DexdumpXMLParseTest(unittest.TestCase): + 'com.foo.bar2' : {'classes': {}}, + 'com.foo.bar3' : {'classes': {}}, + 
} +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testParsePackageNode(self): + example_xml_string = ( +@@ -116,7 +116,7 @@ class DexdumpXMLParseTest(unittest.TestCase): + }, + }, + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testParseClassNode(self): + example_xml_string = ( +@@ -134,7 +134,7 @@ class DexdumpXMLParseTest(unittest.TestCase): + 'methods': ['method1', 'method2'], + 'superclass': 'java.lang.Object', + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/build/android/pylib/utils/google_storage_helper.py b/src/3rdparty/chromium/build/android/pylib/utils/google_storage_helper.py +index d18481051..c5bd5dc02 100644 +--- a/src/3rdparty/chromium/build/android/pylib/utils/google_storage_helper.py ++++ b/src/3rdparty/chromium/build/android/pylib/utils/google_storage_helper.py +@@ -13,7 +13,7 @@ import logging + import os + import sys + import time +-import urlparse ++import urllib.parse + + from pylib.constants import host_paths + from pylib.utils import decorators +@@ -67,7 +67,7 @@ def upload(name, filepath, bucket, gs_args=None, command_args=None, + def read_from_link(link): + # Note that urlparse returns the path with an initial '/', so we only need to + # add one more after the 'gs;' +- gs_path = 'gs:/%s' % urlparse.urlparse(link).path ++ gs_path = 'gs:/%s' % urllib.parse.urlparse(link).path + cmd = [_GSUTIL_PATH, '-q', 'cat', gs_path] + return cmd_helper.GetCmdOutput(cmd) + +diff --git a/src/3rdparty/chromium/build/android/pylib/utils/logging_utils.py b/src/3rdparty/chromium/build/android/pylib/utils/logging_utils.py +index 9c4eae3fc..a0b8fc048 100644 +--- a/src/3rdparty/chromium/build/android/pylib/utils/logging_utils.py ++++ b/src/3rdparty/chromium/build/android/pylib/utils/logging_utils.py +@@ -110,7 +110,7 @@ def OverrideColor(level, color): + try: + yield + finally: +- for 
formatter, prev_color in prev_colors.iteritems(): ++ for formatter, prev_color in prev_colors.items(): + formatter.color_map[level] = prev_color + + +diff --git a/src/3rdparty/chromium/build/android/pylib/utils/proguard_test.py b/src/3rdparty/chromium/build/android/pylib/utils/proguard_test.py +index 7672476e0..cb499b386 100755 +--- a/src/3rdparty/chromium/build/android/pylib/utils/proguard_test.py ++++ b/src/3rdparty/chromium/build/android/pylib/utils/proguard_test.py +@@ -26,7 +26,7 @@ class TestParse(unittest.TestCase): + } + ] + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testMethod(self): + actual = proguard.Parse( +@@ -48,7 +48,7 @@ class TestParse(unittest.TestCase): + } + ] + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testClassAnnotation(self): + actual = proguard.Parse( +@@ -77,7 +77,7 @@ class TestParse(unittest.TestCase): + } + ] + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testClassAnnotationWithArrays(self): + actual = proguard.Parse( +@@ -109,7 +109,7 @@ class TestParse(unittest.TestCase): + } + ] + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testNestedClassAnnotations(self): + actual = proguard.Parse( +@@ -157,7 +157,7 @@ class TestParse(unittest.TestCase): + } + ] + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testClassArraysOfAnnotations(self): + actual = proguard.Parse( +@@ -216,7 +216,7 @@ class TestParse(unittest.TestCase): + } + ] + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testReadFullClassFileAttributes(self): + actual = proguard.Parse( +@@ -248,7 +248,7 @@ class TestParse(unittest.TestCase): + } + ] + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testMethodAnnotation(self): + actual = proguard.Parse( +@@ -283,7 +283,7 @@ class 
TestParse(unittest.TestCase): + } + ] + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testMethodAnnotationWithArrays(self): + actual = proguard.Parse( +@@ -321,7 +321,7 @@ class TestParse(unittest.TestCase): + } + ] + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testMethodAnnotationWithPrimitivesAndArrays(self): + actual = proguard.Parse( +@@ -369,7 +369,7 @@ class TestParse(unittest.TestCase): + } + ] + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testNestedMethodAnnotations(self): + actual = proguard.Parse( +@@ -423,7 +423,7 @@ class TestParse(unittest.TestCase): + } + ] + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testMethodArraysOfAnnotations(self): + actual = proguard.Parse( +@@ -488,7 +488,7 @@ class TestParse(unittest.TestCase): + } + ] + } +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/build/android/pylib/utils/shared_preference_utils.py b/src/3rdparty/chromium/build/android/pylib/utils/shared_preference_utils.py +index ae0d31b78..0fc0623ee 100644 +--- a/src/3rdparty/chromium/build/android/pylib/utils/shared_preference_utils.py ++++ b/src/3rdparty/chromium/build/android/pylib/utils/shared_preference_utils.py +@@ -20,10 +20,10 @@ def UnicodeToStr(data): + """ + if isinstance(data, dict): + return {UnicodeToStr(key): UnicodeToStr(value) +- for key, value in data.iteritems()} ++ for key, value in data.items()} + elif isinstance(data, list): + return [UnicodeToStr(element) for element in data] +- elif isinstance(data, unicode): ++ elif isinstance(data, str): + return data.encode('utf-8') + return data + +@@ -80,12 +80,12 @@ def ApplySharedPreferenceSetting(shared_pref, setting): + shared_pref.Remove(key) + except KeyError: + logging.warning("Attempted to remove non-existent key %s", key) +- for 
key, value in setting.get('set', {}).iteritems(): ++ for key, value in setting.get('set', {}).items(): + if isinstance(value, bool): + shared_pref.SetBoolean(key, value) +- elif isinstance(value, basestring): ++ elif isinstance(value, str): + shared_pref.SetString(key, value) +- elif isinstance(value, long) or isinstance(value, int): ++ elif isinstance(value, int) or isinstance(value, int): + shared_pref.SetLong(key, value) + elif isinstance(value, list): + shared_pref.SetStringSet(key, value) +diff --git a/src/3rdparty/chromium/build/android/pylib/utils/test_filter_test.py b/src/3rdparty/chromium/build/android/pylib/utils/test_filter_test.py +index 1ae5a7ebe..fcd1c9238 100755 +--- a/src/3rdparty/chromium/build/android/pylib/utils/test_filter_test.py ++++ b/src/3rdparty/chromium/build/android/pylib/utils/test_filter_test.py +@@ -22,7 +22,7 @@ class ParseFilterFileTest(unittest.TestCase): + ] + actual = test_filter.ParseFilterFile(input_lines) + expected = ['positive1', 'positive2', 'positive3'], [] +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testParseFilterFile_onlyPositive(self): + input_lines = [ +@@ -31,7 +31,7 @@ class ParseFilterFileTest(unittest.TestCase): + ] + actual = test_filter.ParseFilterFile(input_lines) + expected = ['positive1', 'positive2'], [] +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testParseFilterFile_onlyNegative(self): + input_lines = [ +@@ -40,7 +40,7 @@ class ParseFilterFileTest(unittest.TestCase): + ] + actual = test_filter.ParseFilterFile(input_lines) + expected = [], ['negative1', 'negative2'] +- self.assertEquals(expected, actual) ++ self.assertEqual(expected, actual) + + def testParseFilterFile_positiveAndNegative(self): + input_lines = [ +@@ -51,7 +51,7 @@ class ParseFilterFileTest(unittest.TestCase): + ] + actual = test_filter.ParseFilterFile(input_lines) + expected = ['positive1', 'positive2'], ['negative1', 'negative2'] +- self.assertEquals(expected, 
actual) ++ self.assertEqual(expected, actual) + + + class InitializeFilterFromArgsTest(unittest.TestCase): +@@ -64,7 +64,7 @@ class InitializeFilterFromArgsTest(unittest.TestCase): + 'FooTest.testFoo:BarTest.testBar']) + expected = 'FooTest.testFoo:BarTest.testBar' + actual = test_filter.InitializeFilterFromArgs(args) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + def testInitializeJavaStyleFilter(self): + parser = argparse.ArgumentParser() +@@ -74,7 +74,7 @@ class InitializeFilterFromArgsTest(unittest.TestCase): + 'FooTest#testFoo:BarTest#testBar']) + expected = 'FooTest.testFoo:BarTest.testBar' + actual = test_filter.InitializeFilterFromArgs(args) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + def testInitializeBasicIsolatedScript(self): + parser = argparse.ArgumentParser() +@@ -84,7 +84,7 @@ class InitializeFilterFromArgsTest(unittest.TestCase): + 'FooTest.testFoo::BarTest.testBar']) + expected = 'FooTest.testFoo:BarTest.testBar' + actual = test_filter.InitializeFilterFromArgs(args) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + def testFilterArgWithPositiveFilterInFilterFile(self): + parser = argparse.ArgumentParser() +@@ -98,7 +98,7 @@ class InitializeFilterFromArgsTest(unittest.TestCase): + tmp_file.name]) + expected = 'positive1:positive2-negative1:negative2:negative3' + actual = test_filter.InitializeFilterFromArgs(args) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + def testFilterFileWithPositiveFilterInFilterArg(self): + parser = argparse.ArgumentParser() +@@ -113,7 +113,7 @@ class InitializeFilterFromArgsTest(unittest.TestCase): + tmp_file.name]) + expected = 'positive1:positive2-negative1:negative2:negative3' + actual = test_filter.InitializeFilterFromArgs(args) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + def testPositiveFilterInBothFileAndArg(self): + parser = 
argparse.ArgumentParser() +@@ -141,59 +141,59 @@ class InitializeFilterFromArgsTest(unittest.TestCase): + tmp_file.name]) + expected = '-negative1:negative2:negative3:negative4' + actual = test_filter.InitializeFilterFromArgs(args) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + + class AppendPatternsToFilter(unittest.TestCase): + def testAllEmpty(self): + expected = '' + actual = test_filter.AppendPatternsToFilter('', [], []) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testAppendOnlyPositiveToEmptyFilter(self): + expected = 'positive' + actual = test_filter.AppendPatternsToFilter('', ['positive']) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testAppendOnlyNegativeToEmptyFilter(self): + expected = '-negative' + actual = test_filter.AppendPatternsToFilter('', + negative_patterns=['negative']) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testAppendToEmptyFilter(self): + expected = 'positive-negative' + actual = test_filter.AppendPatternsToFilter('', ['positive'], ['negative']) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testAppendToPositiveOnlyFilter(self): + expected = 'positive1:positive2-negative' + actual = test_filter.AppendPatternsToFilter('positive1', ['positive2'], + ['negative']) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testAppendToNegativeOnlyFilter(self): + expected = 'positive-negative1:negative2' + actual = test_filter.AppendPatternsToFilter('-negative1', ['positive'], + ['negative2']) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testAppendPositiveToFilter(self): + expected = 'positive1:positive2-negative1' + actual = test_filter.AppendPatternsToFilter('positive1-negative1', + ['positive2']) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def 
testAppendNegativeToFilter(self): + expected = 'positive1-negative1:negative2' + actual = test_filter.AppendPatternsToFilter('positive1-negative1', + negative_patterns=['negative2']) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testAppendBothToFilter(self): + expected = 'positive1:positive2-negative1:negative2' + actual = test_filter.AppendPatternsToFilter('positive1-negative1', + positive_patterns=['positive2'], + negative_patterns=['negative2']) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testAppendMultipleToFilter(self): + expected = 'positive1:positive2:positive3-negative1:negative2:negative3' + actual = test_filter.AppendPatternsToFilter('positive1-negative1', + ['positive2', 'positive3'], + ['negative2', 'negative3']) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testRepeatedAppendToFilter(self): + expected = 'positive1:positive2:positive3-negative1:negative2:negative3' + filter_string = test_filter.AppendPatternsToFilter('positive1-negative1', +@@ -201,32 +201,32 @@ class AppendPatternsToFilter(unittest.TestCase): + ['negative2']) + actual = test_filter.AppendPatternsToFilter(filter_string, ['positive3'], + ['negative3']) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testAppendHashSeparatedPatternsToFilter(self): + expected = 'positive.test1:positive.test2-negative.test1:negative.test2' + actual = test_filter.AppendPatternsToFilter('positive#test1-negative#test1', + ['positive#test2'], + ['negative#test2']) +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + + class HasPositivePatterns(unittest.TestCase): + def testEmpty(self): + expected = False + actual = test_filter.HasPositivePatterns('') +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testHasOnlyPositive(self): + expected = True + actual = test_filter.HasPositivePatterns('positive') +- 
self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testHasOnlyNegative(self): + expected = False + actual = test_filter.HasPositivePatterns('-negative') +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + def testHasBoth(self): + expected = True + actual = test_filter.HasPositivePatterns('positive-negative') +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/build/android/pylib/valgrind_tools.py b/src/3rdparty/chromium/build/android/pylib/valgrind_tools.py +index 4689dc384..3e88e9a1d 100644 +--- a/src/3rdparty/chromium/build/android/pylib/valgrind_tools.py ++++ b/src/3rdparty/chromium/build/android/pylib/valgrind_tools.py +@@ -4,7 +4,7 @@ + + # pylint: disable=R0201 + +-from __future__ import print_function ++ + + import logging + import sys +diff --git a/src/3rdparty/chromium/build/android/resource_sizes.py b/src/3rdparty/chromium/build/android/resource_sizes.py +index 11d933640..50cccdfa6 100755 +--- a/src/3rdparty/chromium/build/android/resource_sizes.py ++++ b/src/3rdparty/chromium/build/android/resource_sizes.py +@@ -8,7 +8,7 @@ + More information at //docs/speed/binary_size/metrics.md. + """ + +-from __future__ import print_function ++ + + import argparse + import collections +@@ -140,17 +140,17 @@ def _ExtractLibSectionSizesFromApk(apk_path, lib_path, tool_prefix): + grouped_section_sizes = collections.defaultdict(int) + no_bits_section_sizes, section_sizes = _CreateSectionNameSizeMap( + extracted_lib_path, tool_prefix) +- for group_name, section_names in _READELF_SIZES_METRICS.iteritems(): ++ for group_name, section_names in _READELF_SIZES_METRICS.items(): + for section_name in section_names: + if section_name in section_sizes: + grouped_section_sizes[group_name] += section_sizes.pop(section_name) + + # Consider all NOBITS sections as .bss. 
+ grouped_section_sizes['bss'] = sum( +- v for v in no_bits_section_sizes.itervalues()) ++ v for v in no_bits_section_sizes.values()) + + # Group any unknown section headers into the "other" group. +- for section_header, section_size in section_sizes.iteritems(): ++ for section_header, section_size in section_sizes.items(): + sys.stderr.write('Unknown elf section header: %s\n' % section_header) + grouped_section_sizes['other'] += section_size + +@@ -222,7 +222,7 @@ def _NormalizeResourcesArsc(apk_path, num_arsc_files, num_translations, + config_count = num_translations - 2 + + size = 0 +- for res_id, string_val in en_strings.iteritems(): ++ for res_id, string_val in en_strings.items(): + if string_val == fr_strings[res_id]: + string_size = len(string_val) + # 7 bytes is the per-entry overhead (not specific to any string). See +@@ -263,7 +263,7 @@ def _ReportDfmSizes(zip_obj, report_func): + if module_name != 'base' and config_name[:-4] in ('master', 'hi'): + sizes[module_name] += info.file_size + +- for module_name, size in sorted(sizes.iteritems()): ++ for module_name, size in sorted(sizes.items()): + report_func('DFM_' + module_name, 'Size with hindi', size, 'bytes') + + +@@ -489,7 +489,7 @@ def _DoApkAnalysis(apk_filename, apks_path, tool_prefix, out_dir, report_func): + section_sizes = _ExtractLibSectionSizesFromApk( + apk_filename, lib_info.filename, tool_prefix) + native_code_unaligned_size += sum( +- v for k, v in section_sizes.iteritems() if k != 'bss') ++ v for k, v in section_sizes.items() if k != 'bss') + # Size of main .so vs remaining. 
+ if lib_info == main_lib_info: + main_lib_size = lib_info.file_size +@@ -497,7 +497,7 @@ def _DoApkAnalysis(apk_filename, apks_path, tool_prefix, out_dir, report_func): + secondary_size = native_code.ComputeUncompressedSize() - main_lib_size + report_func('Specifics', 'other lib size', secondary_size, 'bytes') + +- for metric_name, size in section_sizes.iteritems(): ++ for metric_name, size in section_sizes.items(): + report_func('MainLibInfo', metric_name, size, 'bytes') + + # Main metric that we want to monitor for jumps. +@@ -598,10 +598,10 @@ def _DoDexAnalysis(apk_filename, report_func): + sizes, total_size, num_unique_methods = method_count.ExtractSizesFromZip( + apk_filename) + cumulative_sizes = collections.defaultdict(int) +- for classes_dex_sizes in sizes.itervalues(): +- for count_type, count in classes_dex_sizes.iteritems(): ++ for classes_dex_sizes in sizes.values(): ++ for count_type, count in classes_dex_sizes.items(): + cumulative_sizes[count_type] += count +- for count_type, count in cumulative_sizes.iteritems(): ++ for count_type, count in cumulative_sizes.items(): + report_func('Dex', count_type, count, 'entries') + + report_func('Dex', 'unique methods', num_unique_methods, 'entries') +@@ -675,7 +675,7 @@ class _Reporter(object): + value, units) + + def SynthesizeTotals(self): +- for tup, value in sorted(self._combined_metrics.iteritems()): ++ for tup, value in sorted(self._combined_metrics.items()): + graph_title, trace_title, units = tup + perf_tests_results_helper.ReportPerfResult( + self._chartjson, graph_title, 'Combined_' + trace_title, value, units) +diff --git a/src/3rdparty/chromium/build/android/stacktrace/stackwalker.py b/src/3rdparty/chromium/build/android/stacktrace/stackwalker.py +index 5fbab3392..983a6c33c 100755 +--- a/src/3rdparty/chromium/build/android/stacktrace/stackwalker.py ++++ b/src/3rdparty/chromium/build/android/stacktrace/stackwalker.py +@@ -4,7 +4,7 @@ + # Use of this source code is governed by a BSD-style license 
that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import argparse + import os +diff --git a/src/3rdparty/chromium/build/android/test_runner.py b/src/3rdparty/chromium/build/android/test_runner.py +index d15488194..3c9b96769 100755 +--- a/src/3rdparty/chromium/build/android/test_runner.py ++++ b/src/3rdparty/chromium/build/android/test_runner.py +@@ -694,7 +694,7 @@ def AddMonkeyTestOptions(parser): + + parser.add_argument( + '--browser', +- required=True, choices=constants.PACKAGE_INFO.keys(), ++ required=True, choices=list(constants.PACKAGE_INFO.keys()), + metavar='BROWSER', help='Browser under test.') + parser.add_argument( + '--category', +@@ -723,7 +723,7 @@ def AddPythonTestOptions(parser): + parser.add_argument( + '-s', '--suite', + dest='suite_name', metavar='SUITE_NAME', +- choices=constants.PYTHON_UNIT_TEST_SUITES.keys(), ++ choices=list(constants.PYTHON_UNIT_TEST_SUITES.keys()), + help='Name of the test suite to run.') + + +@@ -907,7 +907,7 @@ def RunTestsInPlatformMode(args, result_sink_client=None): + with out_manager, json_finalizer(): + with json_writer(), logcats_uploader, env, test_instance, test_run: + +- repetitions = (xrange(args.repeat + 1) if args.repeat >= 0 ++ repetitions = (range(args.repeat + 1) if args.repeat >= 0 + else itertools.count()) + result_counts = collections.defaultdict( + lambda: collections.defaultdict(int)) +diff --git a/src/3rdparty/chromium/build/android/update_verification.py b/src/3rdparty/chromium/build/android/update_verification.py +index a6529cdb3..c81ef7976 100755 +--- a/src/3rdparty/chromium/build/android/update_verification.py ++++ b/src/3rdparty/chromium/build/android/update_verification.py +@@ -38,7 +38,7 @@ from devil.utils import run_tests_helper + + def CreateAppData(device, old_apk, app_data, package_name): + device.Install(old_apk) +- raw_input('Set the application state. Once ready, press enter and ' ++ input('Set the application state. 
Once ready, press enter and ' + 'select "Backup my data" on the device.') + device.adb.Backup(app_data, packages=[package_name]) + logging.critical('Application data saved to %s', app_data) +@@ -47,7 +47,7 @@ def TestUpdate(device, old_apk, new_apk, app_data, package_name): + device.Install(old_apk) + device.adb.Restore(app_data) + # Restore command is not synchronous +- raw_input('Select "Restore my data" on the device. Then press enter to ' ++ input('Select "Restore my data" on the device. Then press enter to ' + 'continue.') + if not device.IsApplicationInstalled(package_name): + raise Exception('Expected package %s to already be installed. ' +diff --git a/src/3rdparty/chromium/build/apple/tweak_info_plist.py b/src/3rdparty/chromium/build/apple/tweak_info_plist.py +index 6dde0c482..b98c3c313 100755 +--- a/src/3rdparty/chromium/build/apple/tweak_info_plist.py ++++ b/src/3rdparty/chromium/build/apple/tweak_info_plist.py +@@ -20,7 +20,7 @@ + # by the time the app target is done, the info.plist is correct. 
+ # + +-from __future__ import print_function ++ + + import optparse + import os +@@ -115,7 +115,7 @@ def _AddVersionKeys(plist, version_format_for_key, version=None, + if len(groups) != 4 or not all(element.isdigit() for element in groups): + print('Invalid version string specified: "%s"' % version, file=sys.stderr) + return False +- values = dict(zip(('MAJOR', 'MINOR', 'BUILD', 'PATCH'), groups)) ++ values = dict(list(zip(('MAJOR', 'MINOR', 'BUILD', 'PATCH'), groups))) + + for key in version_format_for_key: + plist[key] = _GetVersion(version_format_for_key[key], values, overrides) +diff --git a/src/3rdparty/chromium/build/apply_locales.py b/src/3rdparty/chromium/build/apply_locales.py +index fcd02eb03..c88cb6932 100755 +--- a/src/3rdparty/chromium/build/apply_locales.py ++++ b/src/3rdparty/chromium/build/apply_locales.py +@@ -5,7 +5,7 @@ + + # TODO: remove this script when GYP has for loops + +-from __future__ import print_function ++ + + import sys + import optparse +diff --git a/src/3rdparty/chromium/build/check_gn_headers.py b/src/3rdparty/chromium/build/check_gn_headers.py +index 9bdbba895..1d5986201 100755 +--- a/src/3rdparty/chromium/build/check_gn_headers.py ++++ b/src/3rdparty/chromium/build/check_gn_headers.py +@@ -9,7 +9,7 @@ This script gets all the header files from ninja_deps, which is from the true + dependency generated by the compiler, and report if they don't exist in GN. + """ + +-from __future__ import print_function ++ + + import argparse + import json +@@ -112,7 +112,7 @@ def ParseGNProjectJSON(gn, out_dir, tmp_out): + """Parse GN output and get the header files""" + all_headers = set() + +- for _target, properties in gn['targets'].iteritems(): ++ for _target, properties in gn['targets'].items(): + sources = properties.get('sources', []) + public = properties.get('public', []) + # Exclude '"public": "*"'. 
+@@ -294,7 +294,7 @@ def main(): + print(' ', cc) + + print('\nMissing headers sorted by number of affected object files:') +- count = {k: len(v) for (k, v) in d.iteritems()} ++ count = {k: len(v) for (k, v) in d.items()} + for f in sorted(count, key=count.get, reverse=True): + if f in missing: + print(count[f], f) +diff --git a/src/3rdparty/chromium/build/check_gn_headers_unittest.py b/src/3rdparty/chromium/build/check_gn_headers_unittest.py +index 20c3b1389..01b00b2fa 100755 +--- a/src/3rdparty/chromium/build/check_gn_headers_unittest.py ++++ b/src/3rdparty/chromium/build/check_gn_headers_unittest.py +@@ -71,7 +71,7 @@ class CheckGnHeadersTest(unittest.TestCase): + 'dir3/path/b.h': ['obj/c.o'], + 'c3.hh': ['obj/c.o'], + } +- self.assertEquals(headers, expected) ++ self.assertEqual(headers, expected) + + def testGn(self): + headers = check_gn_headers.ParseGNProjectJSON(gn_input, +@@ -83,7 +83,7 @@ class CheckGnHeadersTest(unittest.TestCase): + 'base/p.h', + 'out/Release/gen/a.h', + ]) +- self.assertEquals(headers, expected) ++ self.assertEqual(headers, expected) + + def testWhitelist(self): + output = check_gn_headers.ParseWhiteList(whitelist) +@@ -93,7 +93,7 @@ class CheckGnHeadersTest(unittest.TestCase): + 'dir/white-both.c', + 'a/b/c', + ]) +- self.assertEquals(output, expected) ++ self.assertEqual(output, expected) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/build/check_return_value.py b/src/3rdparty/chromium/build/check_return_value.py +index 9caa15f11..0550b082d 100755 +--- a/src/3rdparty/chromium/build/check_return_value.py ++++ b/src/3rdparty/chromium/build/check_return_value.py +@@ -6,7 +6,7 @@ + """This program wraps an arbitrary command and prints "1" if the command ran + successfully.""" + +-from __future__ import print_function ++ + + import os + import subprocess +diff --git a/src/3rdparty/chromium/build/chromeos/create_test_runner_script.py b/src/3rdparty/chromium/build/chromeos/create_test_runner_script.py +index 
60b7e67ca..17ba0c9f6 100755 +--- a/src/3rdparty/chromium/build/chromeos/create_test_runner_script.py ++++ b/src/3rdparty/chromium/build/chromeos/create_test_runner_script.py +@@ -114,7 +114,7 @@ def main(args): + vm_test_args=str(vm_test_args), + vm_test_path_args=str(vm_test_path_args))) + +- os.chmod(args.script_output_path, 0750) ++ os.chmod(args.script_output_path, 0o750) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/build/chromeos/test_runner.py b/src/3rdparty/chromium/build/chromeos/test_runner.py +index f7e791423..d0df3369b 100755 +--- a/src/3rdparty/chromium/build/chromeos/test_runner.py ++++ b/src/3rdparty/chromium/build/chromeos/test_runner.py +@@ -157,7 +157,7 @@ class RemoteTest(object): + logging.info('Running the following command on the device:') + logging.info('\n' + '\n'.join(script_contents)) + fd, tmp_path = tempfile.mkstemp(suffix='.sh', dir=self._path_to_outdir) +- os.fchmod(fd, 0755) ++ os.fchmod(fd, 0o755) + with os.fdopen(fd, 'wb') as f: + f.write('\n'.join(script_contents) + '\n') + return tmp_path +@@ -182,7 +182,7 @@ class RemoteTest(object): + + signal.signal(signal.SIGTERM, _kill_child_procs) + +- for i in xrange(self._retries + 1): ++ for i in range(self._retries + 1): + logging.info('########################################') + logging.info('Test attempt #%d', i) + logging.info('########################################') +diff --git a/src/3rdparty/chromium/build/compute_build_timestamp.py b/src/3rdparty/chromium/build/compute_build_timestamp.py +index ceb507b26..70256527b 100755 +--- a/src/3rdparty/chromium/build/compute_build_timestamp.py ++++ b/src/3rdparty/chromium/build/compute_build_timestamp.py +@@ -27,7 +27,7 @@ Either way, it is guaranteed to be in the past and always in UTC. + # the symbol server, so rarely changing timestamps can cause conflicts there + # as well. We only upload symbols for official builds to the symbol server. 
+ +-from __future__ import print_function ++ + + import argparse + import calendar +diff --git a/src/3rdparty/chromium/build/config/get_host_byteorder.py b/src/3rdparty/chromium/build/config/get_host_byteorder.py +index fc01d8571..78e70c041 100755 +--- a/src/3rdparty/chromium/build/config/get_host_byteorder.py ++++ b/src/3rdparty/chromium/build/config/get_host_byteorder.py +@@ -5,7 +5,7 @@ + + """Get Byteorder of host architecture""" + +-from __future__ import print_function ++ + + import sys + +diff --git a/src/3rdparty/chromium/build/config/ios/codesign.py b/src/3rdparty/chromium/build/config/ios/codesign.py +index 1c6e3cea2..f73455c5a 100644 +--- a/src/3rdparty/chromium/build/config/ios/codesign.py ++++ b/src/3rdparty/chromium/build/config/ios/codesign.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import argparse + import codecs +@@ -18,7 +18,7 @@ import sys + import tempfile + + if sys.version_info.major < 3: +- basestring_compat = basestring ++ basestring_compat = str + else: + basestring_compat = str + +@@ -97,7 +97,7 @@ class Bundle(object): + error message. The dictionary will be empty if there are no errors. 
+ """ + errors = {} +- for key, expected_value in expected_mappings.items(): ++ for key, expected_value in list(expected_mappings.items()): + if key in self._data: + value = self._data[key] + if value != expected_value: +@@ -181,12 +181,12 @@ class Entitlements(object): + + def _ExpandVariables(self, data, substitutions): + if isinstance(data, basestring_compat): +- for key, substitution in substitutions.items(): ++ for key, substitution in list(substitutions.items()): + data = data.replace('$(%s)' % (key,), substitution) + return data + + if isinstance(data, dict): +- for key, value in data.items(): ++ for key, value in list(data.items()): + data[key] = self._ExpandVariables(value, substitutions) + return data + +@@ -197,7 +197,7 @@ class Entitlements(object): + return data + + def LoadDefaults(self, defaults): +- for key, value in defaults.items(): ++ for key, value in list(defaults.items()): + if key not in self._data: + self._data[key] = value + +diff --git a/src/3rdparty/chromium/build/config/ios/compile_ib_files.py b/src/3rdparty/chromium/build/config/ios/compile_ib_files.py +index 84781c177..00bd51599 100644 +--- a/src/3rdparty/chromium/build/config/ios/compile_ib_files.py ++++ b/src/3rdparty/chromium/build/config/ios/compile_ib_files.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import print_function ++ + + import argparse + import logging +diff --git a/src/3rdparty/chromium/build/config/ios/compile_xcassets_unittests.py b/src/3rdparty/chromium/build/config/ios/compile_xcassets_unittests.py +index 7655df8c0..b3a08bec5 100644 +--- a/src/3rdparty/chromium/build/config/ios/compile_xcassets_unittests.py ++++ b/src/3rdparty/chromium/build/config/ios/compile_xcassets_unittests.py +@@ -16,7 +16,7 @@ class TestFilterCompilerOutput(unittest.TestCase): + } + + def testNoError(self): +- self.assertEquals( ++ self.assertEqual( + '', + compile_xcassets.FilterCompilerOutput( + '/* com.apple.actool.compilation-results */\n' +@@ -24,7 +24,7 @@ class TestFilterCompilerOutput(unittest.TestCase): + self.relative_paths)) + + def testNoErrorRandomMessages(self): +- self.assertEquals( ++ self.assertEqual( + '', + compile_xcassets.FilterCompilerOutput( + '2017-07-04 04:59:19.460 ibtoold[23487:41214] CoreSimulator is att' +@@ -37,7 +37,7 @@ class TestFilterCompilerOutput(unittest.TestCase): + self.relative_paths)) + + def testWarning(self): +- self.assertEquals( ++ self.assertEqual( + '/* com.apple.actool.document.warnings */\n' + '../../Chromium.xcassets:./image1.imageset/[universal][][][1x][][][][' + '][][]: warning: The file "image1.png" for the image set "image1"' +@@ -52,7 +52,7 @@ class TestFilterCompilerOutput(unittest.TestCase): + self.relative_paths)) + + def testError(self): +- self.assertEquals( ++ self.assertEqual( + '/* com.apple.actool.errors */\n' + '../../Chromium.xcassets: error: The output directory "/Users/janedoe/' + 'chromium/src/out/Default/Chromium.app" does not exist.\n', +@@ -65,7 +65,7 @@ class TestFilterCompilerOutput(unittest.TestCase): + self.relative_paths)) + + def testSpurious(self): +- self.assertEquals( ++ self.assertEqual( + '/* com.apple.actool.document.warnings */\n' + '../../Chromium.xcassets:./AppIcon.appiconset: warning: A 1024x1024 ap' + 'p store icon is required for iOS apps\n', +@@ -80,7 +80,7 @@ 
class TestFilterCompilerOutput(unittest.TestCase): + self.relative_paths)) + + def testComplexError(self): +- self.assertEquals( ++ self.assertEqual( + '/* com.apple.actool.errors */\n' + ': error: Failed to find a suitable device for the type SimDeviceType ' + ': com.apple.dt.Xcode.IBSimDeviceType.iPad-2x with runtime SimRunt' +diff --git a/src/3rdparty/chromium/build/config/ios/find_signing_identity.py b/src/3rdparty/chromium/build/config/ios/find_signing_identity.py +index 8116e688c..bd9892de2 100644 +--- a/src/3rdparty/chromium/build/config/ios/find_signing_identity.py ++++ b/src/3rdparty/chromium/build/config/ios/find_signing_identity.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import argparse + import os +diff --git a/src/3rdparty/chromium/build/config/ios/write_framework_hmap.py b/src/3rdparty/chromium/build/config/ios/write_framework_hmap.py +index ac467ee92..25dc0907f 100644 +--- a/src/3rdparty/chromium/build/config/ios/write_framework_hmap.py ++++ b/src/3rdparty/chromium/build/config/ios/write_framework_hmap.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import print_function ++ + + import os + import struct +@@ -18,7 +18,7 @@ def Main(args): + (out, framework, all_headers) = args[1], args[2], args[3:] + + framework_name = os.path.basename(framework).split('.')[0] +- all_headers = map(os.path.abspath, all_headers) ++ all_headers = list(map(os.path.abspath, all_headers)) + filelist = {} + for header in all_headers: + filename = os.path.basename(header) +@@ -50,7 +50,7 @@ def WriteHmap(output_name, filelist): + count = len(filelist) + capacity = NextGreaterPowerOf2(count) + strings_offset = 24 + (12 * capacity) +- max_value_length = len(max(filelist.values(), key=lambda v: len(v))) ++ max_value_length = len(max(list(filelist.values()), key=lambda v: len(v))) + + out = open(output_name, 'wb') + out.write(struct.pack('= timeout: +diff --git a/src/3rdparty/chromium/build/fuchsia/boot_data.py b/src/3rdparty/chromium/build/fuchsia/boot_data.py +index e8b1e9dd0..1b23bcc7c 100644 +--- a/src/3rdparty/chromium/build/fuchsia/boot_data.py ++++ b/src/3rdparty/chromium/build/fuchsia/boot_data.py +@@ -4,7 +4,7 @@ + + """Functions used to provision Fuchsia boot images.""" + +-import common ++from . 
import common + import logging + import os + import subprocess +diff --git a/src/3rdparty/chromium/build/fuchsia/common_args.py b/src/3rdparty/chromium/build/fuchsia/common_args.py +index 6b3916ccb..595d2c63b 100644 +--- a/src/3rdparty/chromium/build/fuchsia/common_args.py ++++ b/src/3rdparty/chromium/build/fuchsia/common_args.py +@@ -8,7 +8,7 @@ import logging + import os + import sys + +-from common import GetHostArchFromPlatform ++from .common import GetHostArchFromPlatform + + + def _AddTargetSpecificationArgs(arg_parser): +diff --git a/src/3rdparty/chromium/build/fuchsia/deploy_to_amber_repo.py b/src/3rdparty/chromium/build/fuchsia/deploy_to_amber_repo.py +index 02dcf25f3..34fbca9d8 100755 +--- a/src/3rdparty/chromium/build/fuchsia/deploy_to_amber_repo.py ++++ b/src/3rdparty/chromium/build/fuchsia/deploy_to_amber_repo.py +@@ -7,7 +7,7 @@ + """Deploys Fuchsia packages to an Amber repository in a Fuchsia + build output directory.""" + +-import amber_repo ++from . import amber_repo + import argparse + import os + import sys +diff --git a/src/3rdparty/chromium/build/fuchsia/device_target.py b/src/3rdparty/chromium/build/fuchsia/device_target.py +index 135770cd7..c3c8189cd 100644 +--- a/src/3rdparty/chromium/build/fuchsia/device_target.py ++++ b/src/3rdparty/chromium/build/fuchsia/device_target.py +@@ -4,22 +4,22 @@ + + """Implements commands for running and interacting with Fuchsia on devices.""" + +-from __future__ import print_function + +-import amber_repo +-import boot_data ++ ++from . import amber_repo ++from . import boot_data + import filecmp + import logging + import os + import re + import subprocess + import sys +-import target ++from . 
import target + import tempfile + import time + import uuid + +-from common import SDK_ROOT, EnsurePathExists, GetHostToolPathFromPlatform ++from .common import SDK_ROOT, EnsurePathExists, GetHostToolPathFromPlatform + + # The maximum times to attempt mDNS resolution when connecting to a freshly + # booted Fuchsia instance before aborting. +@@ -258,7 +258,7 @@ class DeviceTarget(target.Target): + # Repeatdly query mDNS until we find the device, or we hit the timeout of + # DISCOVERY_TIMEOUT_SECS. + logging.info('Waiting for device to join network.') +- for _ in xrange(_BOOT_DISCOVERY_ATTEMPTS): ++ for _ in range(_BOOT_DISCOVERY_ATTEMPTS): + if self.__Discover(): + break + +diff --git a/src/3rdparty/chromium/build/fuchsia/emu_target.py b/src/3rdparty/chromium/build/fuchsia/emu_target.py +index 440f1ab79..5e3cdc4b4 100644 +--- a/src/3rdparty/chromium/build/fuchsia/emu_target.py ++++ b/src/3rdparty/chromium/build/fuchsia/emu_target.py +@@ -4,14 +4,14 @@ + + """Implements commands for running/interacting with Fuchsia on an emulator.""" + +-import amber_repo +-import boot_data ++from . import amber_repo ++from . import boot_data + import logging + import os +-import runner_logs ++from . import runner_logs + import subprocess + import sys +-import target ++from . import target + import tempfile + + +diff --git a/src/3rdparty/chromium/build/fuchsia/generic_x64_target.py b/src/3rdparty/chromium/build/fuchsia/generic_x64_target.py +index 172d20365..3cc8b9763 100644 +--- a/src/3rdparty/chromium/build/fuchsia/generic_x64_target.py ++++ b/src/3rdparty/chromium/build/fuchsia/generic_x64_target.py +@@ -4,12 +4,12 @@ + """Implements commands for running and interacting with Fuchsia generic + build on devices.""" + +-import boot_data +-import device_target ++from . import boot_data ++from . 
import device_target + import logging + import os + +-from common import SDK_ROOT, EnsurePathExists, \ ++from .common import SDK_ROOT, EnsurePathExists, \ + GetHostToolPathFromPlatform, SubprocessCallWithTimeout + + +@@ -83,7 +83,7 @@ class GenericX64PavedDeviceTarget(device_target.DeviceTarget): + # Repeatdly query mDNS until we find the device, or we hit + # BOOT_DISCOVERY_ATTEMPTS + logging.info('Waiting for device to join network.') +- for _ in xrange(device_target.BOOT_DISCOVERY_ATTEMPTS): ++ for _ in range(device_target.BOOT_DISCOVERY_ATTEMPTS): + if self.__Discover(): + break + +diff --git a/src/3rdparty/chromium/build/fuchsia/net_test_server.py b/src/3rdparty/chromium/build/fuchsia/net_test_server.py +index 60ad78f60..9823e8190 100644 +--- a/src/3rdparty/chromium/build/fuchsia/net_test_server.py ++++ b/src/3rdparty/chromium/build/fuchsia/net_test_server.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-import common ++from . import common + import json + import logging + import os +@@ -37,7 +37,7 @@ class SSHPortForwarder(chrome_test_server_spawner.PortForwarder): + return self._port_mapping[host_port] + + def Unmap(self, device_port): +- for host_port, entry in self._port_mapping.iteritems(): ++ for host_port, entry in self._port_mapping.items(): + if entry == device_port: + forwarding_args = [ + '-NT', '-O', 'cancel', '-R', '0:localhost:%d' % host_port] +diff --git a/src/3rdparty/chromium/build/fuchsia/qemu_target.py b/src/3rdparty/chromium/build/fuchsia/qemu_target.py +index 6c858233a..b31ca6c31 100644 +--- a/src/3rdparty/chromium/build/fuchsia/qemu_target.py ++++ b/src/3rdparty/chromium/build/fuchsia/qemu_target.py +@@ -4,23 +4,23 @@ + + """Implements commands for running and interacting with Fuchsia on QEMU.""" + +-import boot_data +-import common +-import emu_target ++from . import boot_data ++from . import common ++from . 
import emu_target + import logging + import md5 + import os + import platform +-import qemu_image ++from . import qemu_image + import shutil + import subprocess + import sys + import tempfile + +-from common import GetHostArchFromPlatform, GetEmuRootForPlatform +-from common import EnsurePathExists +-from qemu_image import ExecQemuImgWithRetry +-from target import FuchsiaTargetException ++from .common import GetHostArchFromPlatform, GetEmuRootForPlatform ++from .common import EnsurePathExists ++from .qemu_image import ExecQemuImgWithRetry ++from .target import FuchsiaTargetException + + + # Virtual networking configuration data for QEMU. +diff --git a/src/3rdparty/chromium/build/fuchsia/qemu_target_test.py b/src/3rdparty/chromium/build/fuchsia/qemu_target_test.py +index da596ee5b..3f6a95887 100755 +--- a/src/3rdparty/chromium/build/fuchsia/qemu_target_test.py ++++ b/src/3rdparty/chromium/build/fuchsia/qemu_target_test.py +@@ -3,7 +3,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-import qemu_target ++from . import qemu_target + import shutil + import subprocess + import tempfile +diff --git a/src/3rdparty/chromium/build/fuchsia/remote_cmd.py b/src/3rdparty/chromium/build/fuchsia/remote_cmd.py +index 56aa8b172..ef9875bf5 100644 +--- a/src/3rdparty/chromium/build/fuchsia/remote_cmd.py ++++ b/src/3rdparty/chromium/build/fuchsia/remote_cmd.py +@@ -7,7 +7,7 @@ import os + import subprocess + import threading + +-from common import SubprocessCallWithTimeout ++from .common import SubprocessCallWithTimeout + + _SSH = ['ssh'] + _SCP = ['scp', '-C'] # Use gzip compression. 
+diff --git a/src/3rdparty/chromium/build/fuchsia/run_package.py b/src/3rdparty/chromium/build/fuchsia/run_package.py +index ed2cca3bf..cfa302bd3 100644 +--- a/src/3rdparty/chromium/build/fuchsia/run_package.py ++++ b/src/3rdparty/chromium/build/fuchsia/run_package.py +@@ -5,9 +5,9 @@ + """Contains a helper function for deploying and executing a packaged + executable on a Target.""" + +-from __future__ import print_function + +-import common ++ ++from . import common + import hashlib + import logging + import multiprocessing +@@ -19,7 +19,7 @@ import sys + import threading + import uuid + +-from symbolizer import BuildIdsPaths, RunSymbolizer, SymbolizerFilter ++from .symbolizer import BuildIdsPaths, RunSymbolizer, SymbolizerFilter + + FAR = common.GetHostToolPathFromPlatform('far') + +diff --git a/src/3rdparty/chromium/build/fuchsia/runner_exceptions.py b/src/3rdparty/chromium/build/fuchsia/runner_exceptions.py +index 03f872e45..59841f6e0 100644 +--- a/src/3rdparty/chromium/build/fuchsia/runner_exceptions.py ++++ b/src/3rdparty/chromium/build/fuchsia/runner_exceptions.py +@@ -15,7 +15,7 @@ import subprocess + import sys + import traceback + +-from target import FuchsiaTargetException ++from .target import FuchsiaTargetException + + def _PrintException(value, trace): + """Prints stack trace and error message for the current exception.""" +diff --git a/src/3rdparty/chromium/build/fuchsia/runner_logs.py b/src/3rdparty/chromium/build/fuchsia/runner_logs.py +index 20ab6b227..4b94ae8a2 100644 +--- a/src/3rdparty/chromium/build/fuchsia/runner_logs.py ++++ b/src/3rdparty/chromium/build/fuchsia/runner_logs.py +@@ -12,7 +12,7 @@ import collections + import multiprocessing + import os + +-from symbolizer import RunSymbolizer ++from .symbolizer import RunSymbolizer + + SYMBOLIZED_SUFFIX = '.symbolized' + +diff --git a/src/3rdparty/chromium/build/fuchsia/symbolizer.py b/src/3rdparty/chromium/build/fuchsia/symbolizer.py +index e9732c666..831cb59a1 100644 +--- 
a/src/3rdparty/chromium/build/fuchsia/symbolizer.py ++++ b/src/3rdparty/chromium/build/fuchsia/symbolizer.py +@@ -6,9 +6,9 @@ import logging + import os + import subprocess + +-from common import SDK_ROOT +-from common import GetHostArchFromPlatform +-from common import GetHostToolPathFromPlatform ++from .common import SDK_ROOT ++from .common import GetHostArchFromPlatform ++from .common import GetHostToolPathFromPlatform + + # TODO(crbug.com/1131647): Change 'llvm-3.8' to 'llvm' after docker image is + # updated. +@@ -17,10 +17,8 @@ ARM64_DOCKER_LLVM_SYMBOLIZER_PATH = os.path.join('/', 'usr', 'lib', 'llvm-3.8', + + def BuildIdsPaths(package_paths): + """Generate build ids paths for symbolizer processes.""" +- build_ids_paths = map( +- lambda package_path: os.path.join( +- os.path.dirname(package_path), 'ids.txt'), +- package_paths) ++ build_ids_paths = [os.path.join( ++ os.path.dirname(package_path), 'ids.txt') for package_path in package_paths] + return build_ids_paths + + +diff --git a/src/3rdparty/chromium/build/fuchsia/target.py b/src/3rdparty/chromium/build/fuchsia/target.py +index ce89e8156..58738a5b7 100644 +--- a/src/3rdparty/chromium/build/fuchsia/target.py ++++ b/src/3rdparty/chromium/build/fuchsia/target.py +@@ -2,12 +2,12 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-import common ++from . import common + import json + import logging + import os +-import remote_cmd +-import runner_logs ++from . import remote_cmd ++from . 
import runner_logs + import subprocess + import time + +@@ -203,7 +203,7 @@ class Target(object): + assert type(sources) is tuple or type(sources) is list + self._AssertIsStarted() + if for_package: +- sources = map(_MapIsolatedPathsForPackage(for_package, 0), sources) ++ sources = list(map(_MapIsolatedPathsForPackage(for_package, 0), sources)) + logging.debug('copy remote:%s => local:%s' % (sources, dest)) + return self.GetCommandRunner().RunScp(sources, dest, + remote_cmd.COPY_FROM_TARGET) +diff --git a/src/3rdparty/chromium/build/fuchsia/test_runner.py b/src/3rdparty/chromium/build/fuchsia/test_runner.py +index b47483dad..7263ae9f2 100755 +--- a/src/3rdparty/chromium/build/fuchsia/test_runner.py ++++ b/src/3rdparty/chromium/build/fuchsia/test_runner.py +@@ -8,15 +8,15 @@ + + import argparse + import os +-import runner_logs ++from . import runner_logs + import sys + +-from common_args import AddCommonArgs, ConfigureLogging, GetDeploymentTargetForArgs +-from net_test_server import SetupTestServer +-from run_package import RunPackage, RunPackageArgs, SystemLogReader +-from runner_exceptions import HandleExceptionAndReturnExitCode +-from runner_logs import RunnerLogManager +-from symbolizer import BuildIdsPaths ++from .common_args import AddCommonArgs, ConfigureLogging, GetDeploymentTargetForArgs ++from .net_test_server import SetupTestServer ++from .run_package import RunPackage, RunPackageArgs, SystemLogReader ++from .runner_exceptions import HandleExceptionAndReturnExitCode ++from .runner_logs import RunnerLogManager ++from .symbolizer import BuildIdsPaths + + DEFAULT_TEST_SERVER_CONCURRENCY = 4 + +diff --git a/src/3rdparty/chromium/build/fuchsia/update_sdk.py b/src/3rdparty/chromium/build/fuchsia/update_sdk.py +index 6e36f8dcd..49f33aaef 100755 +--- a/src/3rdparty/chromium/build/fuchsia/update_sdk.py ++++ b/src/3rdparty/chromium/build/fuchsia/update_sdk.py +@@ -16,7 +16,7 @@ import subprocess + import sys + import tarfile + +-from common import 
GetHostOsFromPlatform, GetHostArchFromPlatform, \ ++from .common import GetHostOsFromPlatform, GetHostArchFromPlatform, \ + DIR_SOURCE_ROOT, SDK_ROOT, IMAGES_ROOT + + sys.path.append(os.path.join(DIR_SOURCE_ROOT, 'build')) +diff --git a/src/3rdparty/chromium/build/get_landmines.py b/src/3rdparty/chromium/build/get_landmines.py +index a32ab9937..e5ef7c1ff 100755 +--- a/src/3rdparty/chromium/build/get_landmines.py ++++ b/src/3rdparty/chromium/build/get_landmines.py +@@ -8,7 +8,7 @@ This file emits the list of reasons why a particular build needs to be clobbered + (or a list of 'landmines'). + """ + +-from __future__ import print_function ++ + + import sys + +diff --git a/src/3rdparty/chromium/build/gn_helpers.py b/src/3rdparty/chromium/build/gn_helpers.py +index 825865868..3f82dac1a 100644 +--- a/src/3rdparty/chromium/build/gn_helpers.py ++++ b/src/3rdparty/chromium/build/gn_helpers.py +@@ -67,7 +67,7 @@ def ToGNString(value, pretty=False): + """ + + if sys.version_info.major < 3: +- basestring_compat = basestring ++ basestring_compat = str + else: + basestring_compat = str + +diff --git a/src/3rdparty/chromium/build/gn_helpers_unittest.py b/src/3rdparty/chromium/build/gn_helpers_unittest.py +index 5886577ea..e67882d88 100644 +--- a/src/3rdparty/chromium/build/gn_helpers_unittest.py ++++ b/src/3rdparty/chromium/build/gn_helpers_unittest.py +@@ -17,7 +17,7 @@ class UnitTest(unittest.TestCase): + (False, 'false', 'false'), ('', '""', '""'), + ('\\$"$\\', '"\\\\\\$\\"\\$\\\\"', '"\\\\\\$\\"\\$\\\\"'), + (' \t\r\n', '" $0x09$0x0D$0x0A"', '" $0x09$0x0D$0x0A"'), +- (u'\u2713', '"$0xE2$0x9C$0x93"', '"$0xE2$0x9C$0x93"'), ++ ('\u2713', '"$0xE2$0x9C$0x93"', '"$0xE2$0x9C$0x93"'), + ([], '[ ]', '[]'), ([1], '[ 1 ]', '[\n 1\n]\n'), + ([3, 1, 4, 1], '[ 3, 1, 4, 1 ]', '[\n 3,\n 1,\n 4,\n 1\n]\n'), + (['a', True, 2], '[ "a", true, 2 ]', '[\n "a",\n true,\n 2\n]\n'), +@@ -31,7 +31,7 @@ class UnitTest(unittest.TestCase): + '_42A_Zaz_ = [\n false,\n true\n]\nkEy = 137\n'), + ([1, 
'two', + ['"thr,.$\\', True, False, [], +- u'(\u2713)']], '[ 1, "two", [ "\\"thr,.\\$\\\\", true, false, ' + ++ '(\u2713)']], '[ 1, "two", [ "\\"thr,.\\$\\\\", true, false, ' + + '[ ], "($0xE2$0x9C$0x93)" ] ]', '''[ + 1, + "two", +diff --git a/src/3rdparty/chromium/build/gn_run_binary.py b/src/3rdparty/chromium/build/gn_run_binary.py +index d7e2926fa..69d956656 100644 +--- a/src/3rdparty/chromium/build/gn_run_binary.py ++++ b/src/3rdparty/chromium/build/gn_run_binary.py +@@ -8,7 +8,7 @@ Run with: + python gn_run_binary.py [args ...] + """ + +-from __future__ import print_function ++ + + import os + import subprocess +diff --git a/src/3rdparty/chromium/build/linux/dump_app_syms.py b/src/3rdparty/chromium/build/linux/dump_app_syms.py +index f156baf3b..b9fec5a1b 100644 +--- a/src/3rdparty/chromium/build/linux/dump_app_syms.py ++++ b/src/3rdparty/chromium/build/linux/dump_app_syms.py +@@ -5,7 +5,7 @@ + # Helper script to run dump_syms on Chrome Linux executables and strip + # them if needed. + +-from __future__ import print_function ++ + + import os + import subprocess +diff --git a/src/3rdparty/chromium/build/linux/install-chromeos-fonts.py b/src/3rdparty/chromium/build/linux/install-chromeos-fonts.py +index affe132ad..dbdf51135 100755 +--- a/src/3rdparty/chromium/build/linux/install-chromeos-fonts.py ++++ b/src/3rdparty/chromium/build/linux/install-chromeos-fonts.py +@@ -7,7 +7,7 @@ + # This script can be run manually (as root), but is also run as part + # install-build-deps.sh. + +-from __future__ import print_function ++ + + import os + import shutil +@@ -59,7 +59,7 @@ def main(args): + if os.path.isdir(dest_dir): + shutil.rmtree(dest_dir) + os.mkdir(dest_dir) +- os.chmod(dest_dir, 0755) ++ os.chmod(dest_dir, 0o755) + + print("Installing Chrome OS fonts to %s." 
% dest_dir) + for url in URLS: +@@ -80,9 +80,9 @@ def main(args): + + for base, dirs, files in os.walk(dest_dir): + for dir in dirs: +- os.chmod(os.path.join(base, dir), 0755) ++ os.chmod(os.path.join(base, dir), 0o755) + for file in files: +- os.chmod(os.path.join(base, file), 0644) ++ os.chmod(os.path.join(base, file), 0o644) + + print("""\ + +diff --git a/src/3rdparty/chromium/build/linux/rewrite_dirs.py b/src/3rdparty/chromium/build/linux/rewrite_dirs.py +index 17659c3d3..a87d978f7 100755 +--- a/src/3rdparty/chromium/build/linux/rewrite_dirs.py ++++ b/src/3rdparty/chromium/build/linux/rewrite_dirs.py +@@ -5,7 +5,7 @@ + + """Rewrites paths in -I, -L and other option to be relative to a sysroot.""" + +-from __future__ import print_function ++ + + import sys + import os +diff --git a/src/3rdparty/chromium/build/linux/sysroot_scripts/build_and_upload.py b/src/3rdparty/chromium/build/linux/sysroot_scripts/build_and_upload.py +index 1a24da290..9c0bba684 100755 +--- a/src/3rdparty/chromium/build/linux/sysroot_scripts/build_and_upload.py ++++ b/src/3rdparty/chromium/build/linux/sysroot_scripts/build_and_upload.py +@@ -7,7 +7,7 @@ + UploadSysroot for each supported arch of each sysroot creator. + """ + +-from __future__ import print_function ++ + + import glob + import hashlib +diff --git a/src/3rdparty/chromium/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py b/src/3rdparty/chromium/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py +index d79a89b98..923400745 100755 +--- a/src/3rdparty/chromium/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py ++++ b/src/3rdparty/chromium/build/linux/sysroot_scripts/find_incompatible_glibc_symbols.py +@@ -6,7 +6,7 @@ + """Find incompatible symbols in glibc and output a list of replacements. 
+ """ + +-from __future__ import print_function ++ + + import re + import sys +@@ -31,7 +31,7 @@ def get_replacements(nm_file, max_allowed_glibc_version): + symbols[symbol] = set([version]) + + replacements = [] +- for symbol, versions in symbols.iteritems(): ++ for symbol, versions in symbols.items(): + if len(versions) <= 1: + continue + versions_parsed = [[ +diff --git a/src/3rdparty/chromium/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py b/src/3rdparty/chromium/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py +index 5af3eb2c2..a1735505e 100755 +--- a/src/3rdparty/chromium/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py ++++ b/src/3rdparty/chromium/build/linux/sysroot_scripts/find_incompatible_glibc_symbols_unittest.py +@@ -3,7 +3,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-import cStringIO ++import io + import find_incompatible_glibc_symbols + + NM_DATA = """\ +@@ -27,7 +27,7 @@ EXPECTED_REPLACEMENTS = [ + '__asm__(".symver foo2, foo2@GLIBC_2.3");', + ] + +-nm_file = cStringIO.StringIO() ++nm_file = io.StringIO() + nm_file.write(NM_DATA) + nm_file.seek(0) + +diff --git a/src/3rdparty/chromium/build/linux/sysroot_scripts/install-sysroot.py b/src/3rdparty/chromium/build/linux/sysroot_scripts/install-sysroot.py +index f8b7906cc..4a268f82e 100755 +--- a/src/3rdparty/chromium/build/linux/sysroot_scripts/install-sysroot.py ++++ b/src/3rdparty/chromium/build/linux/sysroot_scripts/install-sysroot.py +@@ -18,7 +18,7 @@ + # time chrome's build dependencies are changed but should also be updated + # periodically to include upstream security fixes from Debian. 
+ +-from __future__ import print_function ++ + + import hashlib + import json +@@ -34,7 +34,7 @@ try: + from urllib.request import urlopen + except ImportError: + # Fall back to Python 2's urllib2 +- from urllib2 import urlopen ++ from urllib.request import urlopen + + SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) + +diff --git a/src/3rdparty/chromium/build/linux/sysroot_scripts/merge-package-lists.py b/src/3rdparty/chromium/build/linux/sysroot_scripts/merge-package-lists.py +index 58bd16302..70b3d0577 100755 +--- a/src/3rdparty/chromium/build/linux/sysroot_scripts/merge-package-lists.py ++++ b/src/3rdparty/chromium/build/linux/sysroot_scripts/merge-package-lists.py +@@ -22,7 +22,7 @@ def AddPackagesFromFile(file): + lines = file.readlines() + if len(lines) % 3 != 0: + exit(1) +- for i in xrange(0, len(lines), 3): ++ for i in range(0, len(lines), 3): + packages[lines[i]] = (lines[i + 1], lines[i + 2]) + + AddPackagesFromFile(open(sys.argv[1], 'r')) +@@ -30,5 +30,5 @@ AddPackagesFromFile(sys.stdin) + + output_file = open(sys.argv[1], 'w') + +-for (package, (filename, sha256)) in packages.iteritems(): ++for (package, (filename, sha256)) in packages.items(): + output_file.write(package + filename + sha256) +diff --git a/src/3rdparty/chromium/build/linux/unbundle/remove_bundled_libraries.py b/src/3rdparty/chromium/build/linux/unbundle/remove_bundled_libraries.py +index a1cfffb37..d2e8e82a7 100755 +--- a/src/3rdparty/chromium/build/linux/unbundle/remove_bundled_libraries.py ++++ b/src/3rdparty/chromium/build/linux/unbundle/remove_bundled_libraries.py +@@ -9,7 +9,7 @@ Removes bundled libraries to make sure they are not used. + See README for more details. + """ + +-from __future__ import print_function ++ + + import optparse + import os.path +@@ -91,7 +91,7 @@ def DoMain(argv): + + # Fail if exclusion list contains stale entries - this helps keep it + # up to date. 
+- for exclusion, used in exclusion_used.iteritems(): ++ for exclusion, used in exclusion_used.items(): + if not used: + print('%s does not exist' % exclusion) + exit_code = 1 +diff --git a/src/3rdparty/chromium/build/linux/unbundle/replace_gn_files.py b/src/3rdparty/chromium/build/linux/unbundle/replace_gn_files.py +index eba4bd1fb..3e0d7a00f 100755 +--- a/src/3rdparty/chromium/build/linux/unbundle/replace_gn_files.py ++++ b/src/3rdparty/chromium/build/linux/unbundle/replace_gn_files.py +@@ -8,7 +8,7 @@ Replaces GN files in tree with files from here that + make the build use system libraries. + """ + +-from __future__ import print_function ++ + + import argparse + import os +@@ -51,7 +51,7 @@ def DoMain(argv): + args = parser.parse_args(argv) + + handled_libraries = set() +- for lib, path in REPLACEMENTS.items(): ++ for lib, path in list(REPLACEMENTS.items()): + if lib not in args.system_libraries: + continue + handled_libraries.add(lib) +diff --git a/src/3rdparty/chromium/build/locale_tool.py b/src/3rdparty/chromium/build/locale_tool.py +index cad51908f..0496a1d9f 100755 +--- a/src/3rdparty/chromium/build/locale_tool.py ++++ b/src/3rdparty/chromium/build/locale_tool.py +@@ -28,7 +28,7 @@ inside its section that breaks the script. The check will fail, and + trying to fix it too, but at least the file will not be modified. 
+ """ + +-from __future__ import print_function ++ + + import argparse + import json +@@ -160,7 +160,7 @@ def _SortListSubRange(input_list, start, end, key_func): + """ + result = input_list[:start] + inputs = [] +- for pos in xrange(start, end): ++ for pos in range(start, end): + line = input_list[pos] + key = key_func(line) + inputs.append((key, line)) +@@ -522,8 +522,8 @@ class _GetXmlLangAttributeTest(unittest.TestCase): + } + + def test_GetXmlLangAttribute(self): +- for test_line, expected in self.TEST_DATA.iteritems(): +- self.assertEquals(_GetXmlLangAttribute(test_line), expected) ++ for test_line, expected in self.TEST_DATA.items(): ++ self.assertEqual(_GetXmlLangAttribute(test_line), expected) + + + def _SortGrdElementsRanges(grd_lines, element_predicate): +@@ -550,7 +550,7 @@ def _CheckGrdElementRangeLang(grd_lines, start, end, wanted_locales): + """ + errors = [] + locales = set() +- for pos in xrange(start, end): ++ for pos in range(start, end): + line = grd_lines[pos] + lang = _GetXmlLangAttribute(line) + if not lang: +@@ -613,7 +613,7 @@ def _CheckGrdElementRangeAndroidOutputFilename(grd_lines, start, end, + List of error message strings for this input. Empty on success. 
+ """ + errors = [] +- for pos in xrange(start, end): ++ for pos in range(start, end): + line = grd_lines[pos] + lang = _GetXmlLangAttribute(line) + if not lang: +@@ -673,7 +673,7 @@ def _AddMissingLocalesInGrdAndroidOutputs(grd_file, grd_lines, wanted_locales): + intervals = _BuildIntervalList(grd_lines, _IsGrdAndroidOutputLine) + for start, end in reversed(intervals): + locales = set() +- for pos in xrange(start, end): ++ for pos in range(start, end): + lang = _GetXmlLangAttribute(grd_lines[pos]) + locale = _FixChromiumLangAttribute(lang) + locales.add(locale) +@@ -685,7 +685,7 @@ def _AddMissingLocalesInGrdAndroidOutputs(grd_file, grd_lines, wanted_locales): + src_locale = 'bg' + src_lang_attribute = 'lang="%s"' % src_locale + src_line = None +- for pos in xrange(start, end): ++ for pos in range(start, end): + if src_lang_attribute in grd_lines[pos]: + src_line = grd_lines[pos] + break +@@ -762,7 +762,7 @@ def _CheckGrdTranslationElementRange(grd_lines, start, end, + List of error message strings for this input. Empty on success. 
+ """ + errors = [] +- for pos in xrange(start, end): ++ for pos in range(start, end): + line = grd_lines[pos] + lang = _GetXmlLangAttribute(line) + if not lang: +@@ -845,7 +845,7 @@ def _AddMissingLocalesInGrdTranslations(grd_file, grd_lines, wanted_locales): + intervals = _BuildIntervalList(grd_lines, _IsTranslationGrdOutputLine) + for start, end in reversed(intervals): + locales = set() +- for pos in xrange(start, end): ++ for pos in range(start, end): + lang = _GetXmlLangAttribute(grd_lines[pos]) + locale = _FixChromiumLangAttribute(lang) + locales.add(locale) +@@ -857,7 +857,7 @@ def _AddMissingLocalesInGrdTranslations(grd_file, grd_lines, wanted_locales): + src_locale = 'en-GB' + src_lang_attribute = 'lang="%s"' % src_locale + src_line = None +- for pos in xrange(start, end): ++ for pos in range(start, end): + if src_lang_attribute in grd_lines[pos]: + src_line = grd_lines[pos] + break +@@ -938,7 +938,7 @@ def _CheckGnOutputsRangeForLocalizedStrings(gn_lines, start, end): + These are non-localized strings, and should be ignored. This function is + used to detect them quickly. 
+ """ +- for pos in xrange(start, end): ++ for pos in range(start, end): + if not 'values/' in gn_lines[pos]: + return True + return False +@@ -950,7 +950,7 @@ def _CheckGnOutputsRange(gn_lines, start, end, wanted_locales): + + errors = [] + locales = set() +- for pos in xrange(start, end): ++ for pos in range(start, end): + line = gn_lines[pos] + android_locale = _GetAndroidGnOutputLocale(line) + assert android_locale != None +@@ -991,7 +991,7 @@ def _AddMissingLocalesInGnAndroidOutputs(gn_file, gn_lines, wanted_locales): + continue + + locales = set() +- for pos in xrange(start, end): ++ for pos in range(start, end): + lang = _GetAndroidGnOutputLocale(gn_lines[pos]) + locale = resource_utils.ToChromiumLocaleName(lang) + locales.add(locale) +@@ -1003,7 +1003,7 @@ def _AddMissingLocalesInGnAndroidOutputs(gn_file, gn_lines, wanted_locales): + src_locale = 'bg' + src_values = 'values-%s/' % resource_utils.ToAndroidLocaleName(src_locale) + src_line = None +- for pos in xrange(start, end): ++ for pos in range(start, end): + if src_values in gn_lines[pos]: + src_line = gn_lines[pos] + break +@@ -1310,7 +1310,7 @@ instead of the default format (which is a space-separated list of locale names). 
+ help='Output as JSON list.') + group.add_argument( + '--type', +- choices=tuple(self.TYPE_MAP.viewkeys()), ++ choices=tuple(self.TYPE_MAP.keys()), + default='all', + help='Select type of locale list to print.') + +diff --git a/src/3rdparty/chromium/build/mac/find_sdk.py b/src/3rdparty/chromium/build/mac/find_sdk.py +index d86f31093..77f8d6761 100755 +--- a/src/3rdparty/chromium/build/mac/find_sdk.py ++++ b/src/3rdparty/chromium/build/mac/find_sdk.py +@@ -21,7 +21,7 @@ Sample Output: + 10.14 + """ + +-from __future__ import print_function ++ + + import os + import re +diff --git a/src/3rdparty/chromium/build/mac/should_use_hermetic_xcode.py b/src/3rdparty/chromium/build/mac/should_use_hermetic_xcode.py +index ce36ccbc8..c954c49a8 100755 +--- a/src/3rdparty/chromium/build/mac/should_use_hermetic_xcode.py ++++ b/src/3rdparty/chromium/build/mac/should_use_hermetic_xcode.py +@@ -14,7 +14,7 @@ Usage: + python should_use_hermetic_xcode.py + """ + +-from __future__ import print_function ++ + + import argparse + import os +diff --git a/src/3rdparty/chromium/build/mac_toolchain.py b/src/3rdparty/chromium/build/mac_toolchain.py +index 00507af37..0a172828a 100755 +--- a/src/3rdparty/chromium/build/mac_toolchain.py ++++ b/src/3rdparty/chromium/build/mac_toolchain.py +@@ -17,7 +17,7 @@ The toolchain version can be overridden by setting MAC_TOOLCHAIN_REVISION with + the full revision, e.g. 9A235. + """ + +-from __future__ import print_function ++ + + import argparse + import os +diff --git a/src/3rdparty/chromium/build/print_python_deps.py b/src/3rdparty/chromium/build/print_python_deps.py +index fd29c0972..aa5fdcbf9 100755 +--- a/src/3rdparty/chromium/build/print_python_deps.py ++++ b/src/3rdparty/chromium/build/print_python_deps.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2.7 ++#!/usr/bin/python + # Copyright 2016 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+@@ -28,7 +28,7 @@ def ComputePythonDependencies(): + A path is assumed to be a "system" import if it is outside of chromium's + src/. The paths will be relative to the current directory. + """ +- module_paths = (m.__file__ for m in sys.modules.values() ++ module_paths = (m.__file__ for m in list(sys.modules.values()) + if m and hasattr(m, '__file__')) + + src_paths = set() +@@ -80,7 +80,6 @@ def _GetTargetPythonVersion(module): + if shebang.startswith('#!'): + # Examples: + # '#!/usr/bin/python' +- # '#!/usr/bin/python2.7' + # '#!/usr/bin/python3' + # '#!/usr/bin/env python3' + # '#!/usr/bin/env vpython' +@@ -152,7 +151,7 @@ def main(): + + # Trybots run with vpython as default Python, but with a different config + # from //.vpython. To make the is_vpython test work, and to match the behavior +- # of dev machines, the shebang line must be run with python2.7. ++ # of dev machines, the shebang line must be run with python3. + # + # E.g. $HOME/.vpython-root/dd50d3/bin/python + # E.g. /b/s/w/ir/cache/vpython/ab5c79/bin/python +diff --git a/src/3rdparty/chromium/build/protoc_java.py b/src/3rdparty/chromium/build/protoc_java.py +index fe602a9fc..86232544a 100755 +--- a/src/3rdparty/chromium/build/protoc_java.py ++++ b/src/3rdparty/chromium/build/protoc_java.py +@@ -15,7 +15,7 @@ It performs the following steps: + 4. Creates a new stamp file. + """ + +-from __future__ import print_function ++ + + import argparse + import os +diff --git a/src/3rdparty/chromium/build/redirect_stdout.py b/src/3rdparty/chromium/build/redirect_stdout.py +index 166293cb3..0b3304e3a 100644 +--- a/src/3rdparty/chromium/build/redirect_stdout.py ++++ b/src/3rdparty/chromium/build/redirect_stdout.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import print_function ++ + + import subprocess + import sys +diff --git a/src/3rdparty/chromium/build/rm.py b/src/3rdparty/chromium/build/rm.py +index 43a663d6d..1b564cc8d 100755 +--- a/src/3rdparty/chromium/build/rm.py ++++ b/src/3rdparty/chromium/build/rm.py +@@ -8,7 +8,7 @@ + This module works much like the rm posix command. + """ + +-from __future__ import print_function ++ + + import argparse + import os +diff --git a/src/3rdparty/chromium/build/run_swarming_xcode_install.py b/src/3rdparty/chromium/build/run_swarming_xcode_install.py +index 1ed09263f..1bbcdfcec 100755 +--- a/src/3rdparty/chromium/build/run_swarming_xcode_install.py ++++ b/src/3rdparty/chromium/build/run_swarming_xcode_install.py +@@ -15,7 +15,7 @@ Example usage: + --isolate-server touch-isolate.appspot.com + """ + +-from __future__ import print_function ++ + + import argparse + import os +diff --git a/src/3rdparty/chromium/build/swarming_xcode_install.py b/src/3rdparty/chromium/build/swarming_xcode_install.py +index d214d0b06..8d8241822 100755 +--- a/src/3rdparty/chromium/build/swarming_xcode_install.py ++++ b/src/3rdparty/chromium/build/swarming_xcode_install.py +@@ -7,7 +7,7 @@ + Script used to install Xcode on the swarming bots. 
+ """ + +-from __future__ import print_function ++ + + import os + import shutil +diff --git a/src/3rdparty/chromium/build/toolchain/clang_code_coverage_wrapper.py b/src/3rdparty/chromium/build/toolchain/clang_code_coverage_wrapper.py +index 4f76a0170..49e41175c 100755 +--- a/src/3rdparty/chromium/build/toolchain/clang_code_coverage_wrapper.py ++++ b/src/3rdparty/chromium/build/toolchain/clang_code_coverage_wrapper.py +@@ -46,7 +46,7 @@ Example usage: + --files-to-instrument=coverage_instrumentation_input.txt + """ + +-from __future__ import print_function ++ + + import argparse + import os +diff --git a/src/3rdparty/chromium/build/toolchain/get_concurrent_links.py b/src/3rdparty/chromium/build/toolchain/get_concurrent_links.py +index e895e13ba..c54011138 100755 +--- a/src/3rdparty/chromium/build/toolchain/get_concurrent_links.py ++++ b/src/3rdparty/chromium/build/toolchain/get_concurrent_links.py +@@ -6,7 +6,7 @@ + # This script computs the number of concurrent links we want to run in the build + # as a function of machine spec. It's based on GetDefaultConcurrentLinks in GYP. + +-from __future__ import print_function ++ + + import argparse + import multiprocessing +diff --git a/src/3rdparty/chromium/build/toolchain/get_cpu_count.py b/src/3rdparty/chromium/build/toolchain/get_cpu_count.py +index 765c7c78f..e430cb02a 100644 +--- a/src/3rdparty/chromium/build/toolchain/get_cpu_count.py ++++ b/src/3rdparty/chromium/build/toolchain/get_cpu_count.py +@@ -4,7 +4,7 @@ + + # This script shows cpu count to specify capacity of action pool. 
+ +-from __future__ import print_function ++ + + import multiprocessing + import sys +diff --git a/src/3rdparty/chromium/build/toolchain/mac/filter_libtool.py b/src/3rdparty/chromium/build/toolchain/mac/filter_libtool.py +index 9132c8fb3..4164141ad 100644 +--- a/src/3rdparty/chromium/build/toolchain/mac/filter_libtool.py ++++ b/src/3rdparty/chromium/build/toolchain/mac/filter_libtool.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import os + import re +diff --git a/src/3rdparty/chromium/build/toolchain/mac/get_tool_mtime.py b/src/3rdparty/chromium/build/toolchain/mac/get_tool_mtime.py +index ff0254c63..a26d48056 100644 +--- a/src/3rdparty/chromium/build/toolchain/mac/get_tool_mtime.py ++++ b/src/3rdparty/chromium/build/toolchain/mac/get_tool_mtime.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import os + import sys +diff --git a/src/3rdparty/chromium/build/toolchain/mac/linker_driver.py b/src/3rdparty/chromium/build/toolchain/mac/linker_driver.py +index 66c97af86..19e829d17 100755 +--- a/src/3rdparty/chromium/build/toolchain/mac/linker_driver.py ++++ b/src/3rdparty/chromium/build/toolchain/mac/linker_driver.py +@@ -104,7 +104,7 @@ def Main(args): + except: + # If a linker driver action failed, remove all the outputs to make the + # build step atomic. +- map(_RemovePath, linker_driver_outputs) ++ list(map(_RemovePath, linker_driver_outputs)) + + # Re-report the original failure. 
+ raise +diff --git a/src/3rdparty/chromium/build/toolchain/win/midl.py b/src/3rdparty/chromium/build/toolchain/win/midl.py +index ff4651b89..79b984ea4 100644 +--- a/src/3rdparty/chromium/build/toolchain/win/midl.py ++++ b/src/3rdparty/chromium/build/toolchain/win/midl.py +@@ -2,8 +2,8 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import division +-from __future__ import print_function ++ ++ + + import array + import difflib +diff --git a/src/3rdparty/chromium/build/toolchain/win/ml.py b/src/3rdparty/chromium/build/toolchain/win/ml.py +index 5676ef033..516532cfa 100755 +--- a/src/3rdparty/chromium/build/toolchain/win/ml.py ++++ b/src/3rdparty/chromium/build/toolchain/win/ml.py +@@ -36,7 +36,7 @@ class Struct(object): + + def Subtract(nt, **kwargs): + """Subtract(nt, f=2) returns a new namedtuple with 2 subtracted from nt.f""" +- return nt._replace(**{k: getattr(nt, k) - v for k, v in kwargs.iteritems()}) ++ return nt._replace(**{k: getattr(nt, k) - v for k, v in kwargs.items()}) + + + def MakeDeterministic(objdata): +diff --git a/src/3rdparty/chromium/build/toolchain/win/rc/rc.py b/src/3rdparty/chromium/build/toolchain/win/rc/rc.py +index d30ca3e8d..e9bee138f 100755 +--- a/src/3rdparty/chromium/build/toolchain/win/rc/rc.py ++++ b/src/3rdparty/chromium/build/toolchain/win/rc/rc.py +@@ -15,7 +15,7 @@ options: + /nologo Ignored (rc.py doesn't print a logo by default). + /showIncludes Print referenced header and resource files.""" + +-from __future__ import print_function ++ + from collections import namedtuple + import codecs + import os +diff --git a/src/3rdparty/chromium/build/toolchain/win/setup_toolchain.py b/src/3rdparty/chromium/build/toolchain/win/setup_toolchain.py +index 7485ae661..0f04a732a 100644 +--- a/src/3rdparty/chromium/build/toolchain/win/setup_toolchain.py ++++ b/src/3rdparty/chromium/build/toolchain/win/setup_toolchain.py +@@ -10,7 +10,7 @@ + # win tool. 
The script assumes that the root build directory is the current dir + # and the files will be written to the current directory. + +-from __future__ import print_function ++ + + import errno + import json +@@ -184,7 +184,7 @@ def _FormatAsEnvironmentBlock(envvar_dict): + CreateProcess documentation for more details.""" + block = '' + nul = '\0' +- for key, value in envvar_dict.items(): ++ for key, value in list(envvar_dict.items()): + block += key + '=' + value + nul + block += nul + return block +@@ -279,7 +279,7 @@ def main(): + lib = [p.replace('"', r'\"') for p in env['LIB'].split(';') if p] + # Make lib path relative to builddir when cwd and sdk in same drive. + try: +- lib = map(os.path.relpath, lib) ++ lib = list(map(os.path.relpath, lib)) + except ValueError: + pass + +diff --git a/src/3rdparty/chromium/build/toolchain/win/tool_wrapper.py b/src/3rdparty/chromium/build/toolchain/win/tool_wrapper.py +index 1dffafe3c..f7a5081b3 100644 +--- a/src/3rdparty/chromium/build/toolchain/win/tool_wrapper.py ++++ b/src/3rdparty/chromium/build/toolchain/win/tool_wrapper.py +@@ -8,7 +8,7 @@ This file is copied to the build directory as part of toolchain setup and + is used to set up calls to tools used by the build that need wrappers. + """ + +-from __future__ import print_function ++ + + import os + import re +@@ -185,7 +185,7 @@ class WinTool(object): + env = self._GetEnv(arch) + # TODO(scottmg): This is a temporary hack to get some specific variables + # through to actions that are set after GN-time. http://crbug.com/333738. 
+- for k, v in os.environ.items(): ++ for k, v in list(os.environ.items()): + if k not in env: + env[k] = v + args = open(rspfile).read() +diff --git a/src/3rdparty/chromium/build/util/android_chrome_version.py b/src/3rdparty/chromium/build/util/android_chrome_version.py +index 91d31e97a..e6747869a 100644 +--- a/src/3rdparty/chromium/build/util/android_chrome_version.py ++++ b/src/3rdparty/chromium/build/util/android_chrome_version.py +@@ -106,7 +106,7 @@ _ARCH_TO_MFG_AND_BITNESS = { + } + + # Expose the available choices to other scripts. +-ARCH_CHOICES = _ARCH_TO_MFG_AND_BITNESS.keys() ++ARCH_CHOICES = list(_ARCH_TO_MFG_AND_BITNESS.keys()) + """ + The architecture preference is encoded into the version_code for devices + that support multiple architectures. (exploiting play store logic that pushes +diff --git a/src/3rdparty/chromium/build/util/generate_wrapper.py b/src/3rdparty/chromium/build/util/generate_wrapper.py +index 7ba9fbf1d..bd45beb30 100755 +--- a/src/3rdparty/chromium/build/util/generate_wrapper.py ++++ b/src/3rdparty/chromium/build/util/generate_wrapper.py +@@ -140,7 +140,7 @@ def CreateArgumentParser(): + help='Path to the output directory.') + parser.add_argument( + '--script-language', +- choices=SCRIPT_TEMPLATES.keys(), ++ choices=list(SCRIPT_TEMPLATES.keys()), + help='Language in which the wrapper script will be written.') + parser.add_argument( + 'executable_args', nargs='*', +diff --git a/src/3rdparty/chromium/build/util/lastchange.py b/src/3rdparty/chromium/build/util/lastchange.py +index 874870ad5..90c1cbe2e 100755 +--- a/src/3rdparty/chromium/build/util/lastchange.py ++++ b/src/3rdparty/chromium/build/util/lastchange.py +@@ -6,7 +6,7 @@ + """ + lastchange.py -- Chromium revision fetching utility. 
+ """ +-from __future__ import print_function ++ + + import argparse + import collections +diff --git a/src/3rdparty/chromium/build/util/lib/common/chrome_test_server_spawner.py b/src/3rdparty/chromium/build/util/lib/common/chrome_test_server_spawner.py +index fcfc18105..52152e7f3 100644 +--- a/src/3rdparty/chromium/build/util/lib/common/chrome_test_server_spawner.py ++++ b/src/3rdparty/chromium/build/util/lib/common/chrome_test_server_spawner.py +@@ -9,7 +9,7 @@ chrome test server on the host. + """ + # pylint: disable=W0702 + +-import BaseHTTPServer ++import http.server + import json + import logging + import os +@@ -19,7 +19,7 @@ import subprocess + import sys + import threading + import time +-import urlparse ++import urllib.parse + + + SERVER_TYPES = { +@@ -202,7 +202,7 @@ class TestServerThread(threading.Thread): + self.command_line.append('--startup-pipe=%d' % self.pipe_out) + + # Pass the remaining arguments as-is. +- for key, values in args_copy.iteritems(): ++ for key, values in args_copy.items(): + if not isinstance(values, list): + values = [values] + for value in values: +@@ -215,7 +215,7 @@ class TestServerThread(threading.Thread): + # This is required to avoid subtle deadlocks that could be caused by the + # test server child process inheriting undesirable file descriptors such as + # file lock file descriptors. 
+- for fd in xrange(0, 1024): ++ for fd in range(0, 1024): + if fd != self.pipe_out: + try: + os.close(fd) +@@ -296,7 +296,7 @@ class TestServerThread(threading.Thread): + self.wait_event.wait() + + +-class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): ++class SpawningServerRequestHandler(http.server.BaseHTTPRequestHandler): + """A handler used to process http GET/POST request.""" + + def _SendResponse(self, response_code, response_reason, additional_headers, +@@ -404,7 +404,7 @@ class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): + pass + + def do_POST(self): +- parsed_path = urlparse.urlparse(self.path) ++ parsed_path = urllib.parse.urlparse(self.path) + action = parsed_path.path + _logger.info('Action for POST method is: %s.', action) + if action == '/start': +@@ -414,9 +414,9 @@ class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): + _logger.info('Encounter unknown request: %s.', action) + + def do_GET(self): +- parsed_path = urlparse.urlparse(self.path) ++ parsed_path = urllib.parse.urlparse(self.path) + action = parsed_path.path +- params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1) ++ params = urllib.parse.parse_qs(parsed_path.query, keep_blank_values=1) + _logger.info('Action for GET method is: %s.', action) + for param in params: + _logger.info('%s=%s', param, params[param][0]) +@@ -437,7 +437,7 @@ class SpawningServer(object): + """The class used to start/stop a http server.""" + + def __init__(self, test_server_spawner_port, port_forwarder, max_instances): +- self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port), ++ self.server = http.server.HTTPServer(('', test_server_spawner_port), + SpawningServerRequestHandler) + self.server_port = self.server.server_port + _logger.info('Started test server spawner on port: %d.', self.server_port) +diff --git a/src/3rdparty/chromium/build/util/lib/common/perf_tests_results_helper.py 
b/src/3rdparty/chromium/build/util/lib/common/perf_tests_results_helper.py +index 153886dce..2f141adae 100644 +--- a/src/3rdparty/chromium/build/util/lib/common/perf_tests_results_helper.py ++++ b/src/3rdparty/chromium/build/util/lib/common/perf_tests_results_helper.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import re + import sys +@@ -11,7 +11,7 @@ import json + import logging + import math + +-import perf_result_data_type ++from . import perf_result_data_type + + + # Mapping from result type to test output +diff --git a/src/3rdparty/chromium/build/util/lib/common/unittest_util.py b/src/3rdparty/chromium/build/util/lib/common/unittest_util.py +index 9683ab717..d4d228377 100644 +--- a/src/3rdparty/chromium/build/util/lib/common/unittest_util.py ++++ b/src/3rdparty/chromium/build/util/lib/common/unittest_util.py +@@ -82,7 +82,7 @@ def GetTestsFromSuite(suite): + + def GetTestNamesFromSuite(suite): + """Returns a list of every test name in the given suite.""" +- return map(lambda x: GetTestName(x), GetTestsFromSuite(suite)) ++ return [GetTestName(x) for x in GetTestsFromSuite(suite)] + + + def GetTestName(test): +diff --git a/src/3rdparty/chromium/build/util/lib/common/unittest_util_test.py b/src/3rdparty/chromium/build/util/lib/common/unittest_util_test.py +index 1514c9b6d..e33b4cc39 100755 +--- a/src/3rdparty/chromium/build/util/lib/common/unittest_util_test.py ++++ b/src/3rdparty/chromium/build/util/lib/common/unittest_util_test.py +@@ -8,7 +8,7 @@ + import logging + import sys + import unittest +-import unittest_util ++from . 
import unittest_util + + + class FilterTestNamesTest(unittest.TestCase): +@@ -25,19 +25,19 @@ class FilterTestNamesTest(unittest.TestCase): + + def testMatchAll(self): + x = unittest_util.FilterTestNames(self.possible_list, "*") +- self.assertEquals(x, self.possible_list) ++ self.assertEqual(x, self.possible_list) + + def testMatchPartial(self): + x = unittest_util.FilterTestNames(self.possible_list, "Foo.*") +- self.assertEquals(x, ["Foo.One", "Foo.Two", "Foo.Three"]) ++ self.assertEqual(x, ["Foo.One", "Foo.Two", "Foo.Three"]) + + def testMatchFull(self): + x = unittest_util.FilterTestNames(self.possible_list, "Foo.Two") +- self.assertEquals(x, ["Foo.Two"]) ++ self.assertEqual(x, ["Foo.Two"]) + + def testMatchTwo(self): + x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*") +- self.assertEquals(x, ["Bar.One", ++ self.assertEqual(x, ["Bar.One", + "Bar.Two", + "Bar.Three", + "Foo.One", +@@ -46,14 +46,14 @@ class FilterTestNamesTest(unittest.TestCase): + + def testMatchWithNegative(self): + x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:Foo.*-*.Three") +- self.assertEquals(x, ["Bar.One", ++ self.assertEqual(x, ["Bar.One", + "Bar.Two", + "Foo.One", + "Foo.Two"]) + + def testMatchOverlapping(self): + x = unittest_util.FilterTestNames(self.possible_list, "Bar.*:*.Two") +- self.assertEquals(x, ["Bar.One", ++ self.assertEqual(x, ["Bar.One", + "Bar.Two", + "Bar.Three", + "Foo.Two", +diff --git a/src/3rdparty/chromium/build/util/lib/common/util.py b/src/3rdparty/chromium/build/util/lib/common/util.py +index a415b1f53..1d6f05980 100644 +--- a/src/3rdparty/chromium/build/util/lib/common/util.py ++++ b/src/3rdparty/chromium/build/util/lib/common/util.py +@@ -5,14 +5,14 @@ + """Generic utilities for all python scripts.""" + + import atexit +-import httplib ++import http.client + import os + import signal + import stat + import subprocess + import sys + import tempfile +-import urlparse ++import urllib.parse + + + def GetPlatformName(): +@@ -136,9 
+136,9 @@ def DoesUrlExist(url):
+   Returns:
+     True if url exists, otherwise False.
+   """
+-  parsed = urlparse.urlparse(url)
++  parsed = urllib.parse.urlparse(url)
+   try:
+-    conn = httplib.HTTPConnection(parsed.netloc)
++    conn = http.client.HTTPConnection(parsed.netloc)
+     conn.request('HEAD', parsed.path)
+     response = conn.getresponse()
+   except (socket.gaierror, socket.error):
+diff --git a/src/3rdparty/chromium/build/util/python2_action.py b/src/3rdparty/chromium/build/util/python2_action.py
+index a62d065ce..0353c88a8 100644
+--- a/src/3rdparty/chromium/build/util/python2_action.py
++++ b/src/3rdparty/chromium/build/util/python2_action.py
+@@ -1,22 +1,22 @@
+ # Copyright 2020 The Chromium Authors. All rights reserved.
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+-"""Script for ensuring that a python action runs under Python2, not Python3."""
++"""Script for ensuring that a python action runs under Python3, not Python2."""
+ 
+ import subprocess
+ import sys
+ 
+ if sys.version_info.major == 2:
+-  # If we get here, we're already Python2, so just re-execute the
++  # If we get here, we're running under Python2; just re-execute the
+   # command without the wrapper.
+   exe = sys.executable
+ elif sys.executable.endswith('.exe'):
+   # If we get here, we're a Python3 executable likely running on
+-  # Windows, so look for the Python2 wrapper in depot_tools.
++  # Windows, so look for the Python3 wrapper in depot_tools.
+   exe = 'python.bat'
+ else:
+   # If we get here, we are a Python3 executable.  Hope that we can find
+-  # a `python2.7` in path somewhere.
+-  exe = 'python2.7'
++  # a `python3` in path somewhere.
++  exe = 'python3'
+ 
+ sys.exit(subprocess.call([exe] + sys.argv[1:]))
+diff --git a/src/3rdparty/chromium/build/util/version.py b/src/3rdparty/chromium/build/util/version.py
+index 4f440c4ee..68d4892a8 100755
+--- a/src/3rdparty/chromium/build/util/version.py
++++ b/src/3rdparty/chromium/build/util/version.py
+@@ -7,7 +7,7 @@
+ version.py -- Chromium version string substitution utility.
+ """
+ 
+-from __future__ import print_function
++
+ 
+ import argparse
+ import os
+@@ -70,7 +70,7 @@ def SubstTemplate(contents, values):
+   contains any @KEYWORD@ strings expecting them to be recursively
+   substituted, okay?
+   """
+-  for key, val in values.items():
++  for key, val in list(values.items()):
+     try:
+       contents = contents.replace('@' + key + '@', val)
+     except TypeError:
+@@ -189,12 +189,12 @@ def GenerateValues(options, evals):
+   """
+   values = FetchValues(options.file, options.official)
+ 
+-  for key, val in evals.items():
++  for key, val in list(evals.items()):
+     values[key] = str(eval(val, globals(), values))
+ 
+   if options.os == 'android':
+     android_chrome_version_codes = android_chrome_version.GenerateVersionCodes(
+-        values, options.arch, options.next)
++        values, options.arch, options.next)
+     values.update(android_chrome_version_codes)
+ 
+   return values
+diff --git a/src/3rdparty/chromium/build/util/version_test.py b/src/3rdparty/chromium/build/util/version_test.py
+index 2a65ddc71..71bf2b1a1 100644
+--- a/src/3rdparty/chromium/build/util/version_test.py
++++ b/src/3rdparty/chromium/build/util/version_test.py
+@@ -95,7 +95,7 @@ class _VersionTest(unittest.TestCase):
+     result = {}
+     version.FetchValuesFromFile(result, self._CHROME_VERSION_FILE)
+ 
+-    for key, val in result.iteritems():
++    for key, val in result.items():
+       self.assertIsInstance(key, str)
+       self.assertIsInstance(val, str)
+ 
+@@ -105,15 +105,15 @@ class _VersionTest(unittest.TestCase):
+         get_new_args=lambda args: self._EXAMPLE_ANDROID_ARGS)
+     contents = output['contents']
+ 
+-    
self.assertRegexpMatches(contents, r'\bchrome_version_code = "\d+"\s') +- self.assertRegexpMatches(contents, ++ self.assertRegex(contents, r'\bchrome_version_code = "\d+"\s') ++ self.assertRegex(contents, + r'\bchrome_modern_version_code = "\d+"\s') +- self.assertRegexpMatches(contents, r'\bmonochrome_version_code = "\d+"\s') +- self.assertRegexpMatches(contents, r'\btrichrome_version_code = "\d+"\s') +- self.assertRegexpMatches(contents, ++ self.assertRegex(contents, r'\bmonochrome_version_code = "\d+"\s') ++ self.assertRegex(contents, r'\btrichrome_version_code = "\d+"\s') ++ self.assertRegex(contents, + r'\bwebview_stable_version_code = "\d+"\s') +- self.assertRegexpMatches(contents, r'\bwebview_beta_version_code = "\d+"\s') +- self.assertRegexpMatches(contents, r'\bwebview_dev_version_code = "\d+"\s') ++ self.assertRegex(contents, r'\bwebview_beta_version_code = "\d+"\s') ++ self.assertRegex(contents, r'\bwebview_dev_version_code = "\d+"\s') + + def testBuildOutputAndroidArchVariantsArm64(self): + """Assert 64-bit-specific version codes""" +@@ -129,13 +129,13 @@ class _VersionTest(unittest.TestCase): + output = self._RunBuildOutput(get_new_args=lambda args: new_args) + contents = output['contents'] + +- self.assertRegexpMatches(contents, ++ self.assertRegex(contents, + r'\bmonochrome_64_32_version_code = "\d+"\s') +- self.assertRegexpMatches(contents, ++ self.assertRegex(contents, + r'\bmonochrome_64_version_code = "\d+"\s') +- self.assertRegexpMatches(contents, ++ self.assertRegex(contents, + r'\btrichrome_64_32_version_code = "\d+"\s') +- self.assertRegexpMatches(contents, ++ self.assertRegex(contents, + r'\btrichrome_64_version_code = "\d+"\s') + + def testBuildOutputAndroidArchVariantsX64(self): +@@ -152,13 +152,13 @@ class _VersionTest(unittest.TestCase): + output = self._RunBuildOutput(get_new_args=lambda args: new_args) + contents = output['contents'] + +- self.assertRegexpMatches(contents, ++ self.assertRegex(contents, + 
r'\bmonochrome_64_32_version_code = "\d+"\s') +- self.assertRegexpMatches(contents, ++ self.assertRegex(contents, + r'\bmonochrome_64_version_code = "\d+"\s') +- self.assertRegexpMatches(contents, ++ self.assertRegex(contents, + r'\btrichrome_64_32_version_code = "\d+"\s') +- self.assertRegexpMatches(contents, ++ self.assertRegex(contents, + r'\btrichrome_64_version_code = "\d+"\s') + + def testBuildOutputAndroidChromeArchInput(self): +diff --git a/src/3rdparty/chromium/build/vs_toolchain.py b/src/3rdparty/chromium/build/vs_toolchain.py +index 6bc24a921..3ea500c1f 100755 +--- a/src/3rdparty/chromium/build/vs_toolchain.py ++++ b/src/3rdparty/chromium/build/vs_toolchain.py +@@ -3,7 +3,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import collections + import glob +@@ -120,12 +120,12 @@ def _RegistryGetValueUsingWinReg(key, value): + contents of the registry key's value, or None on failure. Throws + ImportError if _winreg is unavailable. + """ +- import _winreg ++ import winreg + try: + root, subkey = key.split('\\', 1) + assert root == 'HKLM' # Only need HKLM for now. +- with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey: +- return _winreg.QueryValueEx(hkey, value)[0] ++ with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, subkey) as hkey: ++ return winreg.QueryValueEx(hkey, value)[0] + except WindowsError: + return None + +@@ -148,7 +148,7 @@ def GetVisualStudioVersion(): + + # VS installed in system for external developers + supported_versions_str = ', '.join('{} ({})'.format(v,k) +- for k,v in MSVS_VERSIONS.items()) ++ for k,v in list(MSVS_VERSIONS.items())) + available_versions = [] + for version in supported_versions: + # Checking vs%s_install environment variables. 
+diff --git a/src/3rdparty/chromium/build/win/copy_cdb_to_output.py b/src/3rdparty/chromium/build/win/copy_cdb_to_output.py +index a0b99bb77..077051feb 100755 +--- a/src/3rdparty/chromium/build/win/copy_cdb_to_output.py ++++ b/src/3rdparty/chromium/build/win/copy_cdb_to_output.py +@@ -3,7 +3,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import glob + import hashlib +diff --git a/src/3rdparty/chromium/build/win/gn_meta_sln.py b/src/3rdparty/chromium/build/win/gn_meta_sln.py +index 862d27824..1da3677bd 100644 +--- a/src/3rdparty/chromium/build/win/gn_meta_sln.py ++++ b/src/3rdparty/chromium/build/win/gn_meta_sln.py +@@ -6,7 +6,7 @@ + # Helper utility to combine GN-generated Visual Studio projects into + # a single meta-solution. + +-from __future__ import print_function ++ + + import os + import glob +@@ -34,7 +34,7 @@ def ExtractIdg(proj_file_name): + if " [*] + +-from __future__ import print_function ++ + + import difflib + import distutils.dir_util +diff --git a/src/3rdparty/chromium/build/win/use_ansi_codes.py b/src/3rdparty/chromium/build/win/use_ansi_codes.py +index 5951c2ab4..a7aadb720 100755 +--- a/src/3rdparty/chromium/build/win/use_ansi_codes.py ++++ b/src/3rdparty/chromium/build/win/use_ansi_codes.py +@@ -4,7 +4,7 @@ + # found in the LICENSE file. 
+ """Prints if the the terminal is likely to understand ANSI codes.""" + +-from __future__ import print_function ++ + + import os + +diff --git a/src/3rdparty/chromium/buildtools/checkdeps/builddeps.py b/src/3rdparty/chromium/buildtools/checkdeps/builddeps.py +index 2fe48b2d6..5d2d40625 100755 +--- a/src/3rdparty/chromium/buildtools/checkdeps/builddeps.py ++++ b/src/3rdparty/chromium/buildtools/checkdeps/builddeps.py +@@ -180,7 +180,7 @@ class DepsBuilder(object): + if self._ignore_specific_rules: + return rules + +- for regexp, specific_rules in specific_includes.iteritems(): ++ for regexp, specific_rules in specific_includes.items(): + for rule_str in specific_rules: + ApplyOneRule(rule_str, regexp) + +@@ -210,7 +210,7 @@ class DepsBuilder(object): + + # Check the DEPS file in this directory. + if self.verbose: +- print 'Applying rules from', dir_path_local_abs ++ print('Applying rules from', dir_path_local_abs) + def FromImpl(*_): + pass # NOP function so "From" doesn't fail. + +@@ -248,9 +248,9 @@ class DepsBuilder(object): + if os.path.isfile(deps_file_path) and not ( + self._under_test and + os.path.basename(dir_path_local_abs) == 'checkdeps'): +- execfile(deps_file_path, global_scope, local_scope) ++ exec(compile(open(deps_file_path, "rb").read(), deps_file_path, 'exec'), global_scope, local_scope) + elif self.verbose: +- print ' No deps file found in', dir_path_local_abs ++ print(' No deps file found in', dir_path_local_abs) + + # Even if a DEPS file does not exist we still invoke ApplyRules + # to apply the implicit "allow" rule for the current directory +diff --git a/src/3rdparty/chromium/buildtools/checkdeps/checkdeps.py b/src/3rdparty/chromium/buildtools/checkdeps/checkdeps.py +index 4713dc0f3..91151c664 100755 +--- a/src/3rdparty/chromium/buildtools/checkdeps/checkdeps.py ++++ b/src/3rdparty/chromium/buildtools/checkdeps/checkdeps.py +@@ -68,7 +68,7 @@ class DepsChecker(DepsBuilder): + if self.results_formatter.GetResults(): + 
self.results_formatter.PrintResults() + return 1 +- print '\nSUCCESS\n' ++ print('\nSUCCESS\n') + return 0 + + def CheckDirectory(self, start_dir): +@@ -185,7 +185,7 @@ class DepsChecker(DepsBuilder): + verbose=self.verbose, root_dir=self.base_directory)) + + def PrintUsage(): +- print """Usage: python checkdeps.py [--root ] [tocheck] ++ print("""Usage: python checkdeps.py [--root ] [tocheck] + + --root ROOT Specifies the repository root. This defaults to "../../.." + relative to the script file. This will be correct given the +@@ -198,7 +198,7 @@ def PrintUsage(): + + Examples: + python checkdeps.py +- python checkdeps.py --root c:\\source chrome""" ++ python checkdeps.py --root c:\\source chrome""") + + + def main(): +@@ -266,12 +266,12 @@ def main(): + return 1 + + if not start_dir.startswith(deps_checker.base_directory): +- print 'Directory to check must be a subdirectory of the base directory,' +- print 'but %s is not a subdirectory of %s' % (start_dir, base_directory) ++ print('Directory to check must be a subdirectory of the base directory,') ++ print('but %s is not a subdirectory of %s' % (start_dir, base_directory)) + return 1 + +- print 'Using base directory:', base_directory +- print 'Checking:', start_dir ++ print('Using base directory:', base_directory) ++ print('Checking:', start_dir) + + if options.generate_temp_rules: + deps_checker.results_formatter = results.TemporaryRulesFormatter() +diff --git a/src/3rdparty/chromium/buildtools/checkdeps/checkdeps_test.py b/src/3rdparty/chromium/buildtools/checkdeps/checkdeps_test.py +index 8d0588dff..006e3e4b3 100755 +--- a/src/3rdparty/chromium/buildtools/checkdeps/checkdeps_test.py ++++ b/src/3rdparty/chromium/buildtools/checkdeps/checkdeps_test.py +@@ -31,9 +31,9 @@ class CheckDepsTest(unittest.TestCase): + + problems = self.deps_checker.results_formatter.GetResults() + if skip_tests: +- self.failUnlessEqual(4, len(problems)) ++ self.assertEqual(4, len(problems)) + else: +- self.failUnlessEqual(5, 
len(problems)) ++ self.assertEqual(5, len(problems)) + + def VerifySubstringsInProblems(key_path, substrings_in_sequence): + """Finds the problem in |problems| that contains |key_path|, +@@ -48,7 +48,7 @@ class CheckDepsTest(unittest.TestCase): + if index != -1: + for substring in substrings_in_sequence: + index = problem.find(substring, index + 1) +- self.failUnless(index != -1, '%s in %s' % (substring, problem)) ++ self.assertTrue(index != -1, '%s in %s' % (substring, problem)) + found = True + break + if not found: +@@ -103,16 +103,16 @@ class CheckDepsTest(unittest.TestCase): + return self.deps_checker.results_formatter.GetResults() + + def testCountViolations(self): +- self.failUnlessEqual('11', self.CountViolations(False)) ++ self.assertEqual('11', self.CountViolations(False)) + + def testCountViolationsIgnoringTempRules(self): +- self.failUnlessEqual('12', self.CountViolations(True)) ++ self.assertEqual('12', self.CountViolations(True)) + + def testCountViolationsWithRelativePath(self): + self.deps_checker.results_formatter = results.CountViolationsFormatter() + self.deps_checker.CheckDirectory( + os.path.join('buildtools', 'checkdeps', 'testdata', 'allowed')) +- self.failUnlessEqual('4', self.deps_checker.results_formatter.GetResults()) ++ self.assertEqual('4', self.deps_checker.results_formatter.GetResults()) + + def testTempRulesGenerator(self): + self.deps_checker.results_formatter = results.TemporaryRulesFormatter() +@@ -120,11 +120,11 @@ class CheckDepsTest(unittest.TestCase): + os.path.join(self.deps_checker.base_directory, + 'buildtools/checkdeps/testdata/allowed')) + temp_rules = self.deps_checker.results_formatter.GetResults() +- expected = [u' "!buildtools/checkdeps/testdata/disallowed/bad.h",', +- u' "!buildtools/checkdeps/testdata/disallowed/teststuff/bad.h",', +- u' "!third_party/explicitly_disallowed/bad.h",', +- u' "!third_party/no_rule/bad.h",'] +- self.failUnlessEqual(expected, temp_rules) ++ expected = [' 
"!buildtools/checkdeps/testdata/disallowed/bad.h",', ++ ' "!buildtools/checkdeps/testdata/disallowed/teststuff/bad.h",', ++ ' "!third_party/explicitly_disallowed/bad.h",', ++ ' "!third_party/no_rule/bad.h",'] ++ self.assertEqual(expected, temp_rules) + + def testBadBaseDirectoryNotCheckoutRoot(self): + # This assumes git. It's not a valid test if buildtools is fetched via svn. +@@ -138,34 +138,34 @@ class CheckDepsTest(unittest.TestCase): + ['#include "buildtools/checkdeps/testdata/allowed/good.h"', + '#include "buildtools/checkdeps/testdata/disallowed/allowed/good.h"'] + ]]) +- self.failIf(problems) ++ self.assertFalse(problems) + + def testCheckAddedIncludesManyGarbageLines(self): + garbage_lines = ["My name is Sam%d\n" % num for num in range(50)] + problems = self.deps_checker.CheckAddedCppIncludes( + [['buildtools/checkdeps/testdata/allowed/test.cc', garbage_lines]]) +- self.failIf(problems) ++ self.assertFalse(problems) + + def testCheckAddedIncludesNoRule(self): + problems = self.deps_checker.CheckAddedCppIncludes( + [['buildtools/checkdeps/testdata/allowed/test.cc', + ['#include "no_rule_for_this/nogood.h"'] + ]]) +- self.failUnless(problems) ++ self.assertTrue(problems) + + def testCheckAddedIncludesSkippedDirectory(self): + problems = self.deps_checker.CheckAddedCppIncludes( + [['buildtools/checkdeps/testdata/disallowed/allowed/skipped/test.cc', + ['#include "whatever/whocares.h"'] + ]]) +- self.failIf(problems) ++ self.assertFalse(problems) + + def testCheckAddedIncludesTempAllowed(self): + problems = self.deps_checker.CheckAddedCppIncludes( + [['buildtools/checkdeps/testdata/allowed/test.cc', + ['#include "buildtools/checkdeps/testdata/disallowed/temporarily_allowed.h"'] + ]]) +- self.failUnless(problems) ++ self.assertTrue(problems) + + def testCopyIsDeep(self): + # Regression test for a bug where we were making shallow copies of +@@ -187,26 +187,26 @@ class CheckDepsTest(unittest.TestCase): + ]]) + # With the bug in place, there would be two problems 
reported, and + # the second would be for foo_unittest.cc. +- self.failUnless(len(problems) == 1) +- self.failUnless(problems[0][0].endswith('/test.cc')) ++ self.assertTrue(len(problems) == 1) ++ self.assertTrue(problems[0][0].endswith('/test.cc')) + + def testTraversalIsOrdered(self): + dirs_traversed = [] + for rules, filenames in self.deps_checker.GetAllRulesAndFiles(dir_name='buildtools'): +- self.failUnlessEqual(type(filenames), list) +- self.failUnlessEqual(filenames, sorted(filenames)) ++ self.assertEqual(type(filenames), list) ++ self.assertEqual(filenames, sorted(filenames)) + if filenames: + dir_names = set(os.path.dirname(file) for file in filenames) +- self.failUnlessEqual(1, len(dir_names)) ++ self.assertEqual(1, len(dir_names)) + dirs_traversed.append(dir_names.pop()) +- self.failUnlessEqual(dirs_traversed, sorted(dirs_traversed)) ++ self.assertEqual(dirs_traversed, sorted(dirs_traversed)) + + def testCheckPartialImportsAreAllowed(self): + problems = self.deps_checker.CheckAddedProtoImports( + [['buildtools/checkdeps/testdata/test.proto', + ['import "no_rule_for_this/nogood.proto"'] + ]]) +- self.failIf(problems) ++ self.assertFalse(problems) + + def testCheckAddedFullPathImportsAllowed(self): + problems = self.deps_checker.CheckAddedProtoImports( +@@ -214,28 +214,28 @@ class CheckDepsTest(unittest.TestCase): + ['import "buildtools/checkdeps/testdata/allowed/good.proto"', + 'import "buildtools/checkdeps/testdata/disallowed/sub_folder/good.proto"'] + ]]) +- self.failIf(problems) ++ self.assertFalse(problems) + + def testCheckAddedFullPathImportsDisallowed(self): + problems = self.deps_checker.CheckAddedProtoImports( + [['buildtools/checkdeps/testdata/test.proto', + ['import "buildtools/checkdeps/testdata/disallowed/bad.proto"'] + ]]) +- self.failUnless(problems) ++ self.assertTrue(problems) + + def testCheckAddedFullPathImportsManyGarbageLines(self): + garbage_lines = ["My name is Sam%d\n" % num for num in range(50)] + problems = 
self.deps_checker.CheckAddedProtoImports( + [['buildtools/checkdeps/testdata/test.proto', + garbage_lines]]) +- self.failIf(problems) ++ self.assertFalse(problems) + + def testCheckAddedIncludesNoRuleFullPath(self): + problems = self.deps_checker.CheckAddedProtoImports( + [['buildtools/checkdeps/testdata/test.proto', + ['import "tools/some.proto"'] + ]]) +- self.failUnless(problems) ++ self.assertTrue(problems) + + if __name__ == '__main__': + unittest.main() +diff --git a/src/3rdparty/chromium/buildtools/checkdeps/cpp_checker.py b/src/3rdparty/chromium/buildtools/checkdeps/cpp_checker.py +index 3efad9741..6b87e9509 100644 +--- a/src/3rdparty/chromium/buildtools/checkdeps/cpp_checker.py ++++ b/src/3rdparty/chromium/buildtools/checkdeps/cpp_checker.py +@@ -64,7 +64,7 @@ class CppChecker(object): + # Don't fail when no directory is specified. We may want to be more + # strict about this in the future. + if self._verbose: +- print ' WARNING: include specified with no directory: ' + include_path ++ print(' WARNING: include specified with no directory: ' + include_path) + return True, None + + if self._resolve_dotdot and '../' in include_path: +@@ -80,7 +80,7 @@ class CppChecker(object): + + def CheckFile(self, rules, filepath): + if self._verbose: +- print 'Checking: ' + filepath ++ print('Checking: ' + filepath) + + dependee_status = results.DependeeStatus(filepath) + ret_val = '' # We'll collect the error messages in here +diff --git a/src/3rdparty/chromium/buildtools/checkdeps/graphdeps.py b/src/3rdparty/chromium/buildtools/checkdeps/graphdeps.py +index aff3c765c..b87542a29 100755 +--- a/src/3rdparty/chromium/buildtools/checkdeps/graphdeps.py ++++ b/src/3rdparty/chromium/buildtools/checkdeps/graphdeps.py +@@ -187,7 +187,7 @@ class DepsGrapher(DepsBuilder): + # Edges and nodes are emphasized with color and line/border weight depending + # on how many of incl/excl/hilite_fanins/hilite_fanouts filters they hit, + # and in what way. 
+- for src in deps_graph.keys(): ++ for src in list(deps_graph.keys()): + for (dst, allow) in deps_graph[src]: + if allow == Rule.DISALLOW and self.hide_disallowed_deps: + continue +@@ -233,7 +233,7 @@ class DepsGrapher(DepsBuilder): + + # Reformat the computed raw node attributes into a final DOT representation. + nodes = [] +- for (node, attrs) in node_props.iteritems(): ++ for (node, attrs) in node_props.items(): + attr_strs = [] + if attrs['hilite']: + attr_strs.append('style=filled,fillcolor=%s' % attrs['hilite']) +@@ -253,7 +253,7 @@ class DepsGrapher(DepsBuilder): + + + def PrintUsage(): +- print """Usage: python graphdeps.py [--root ] ++ print("""Usage: python graphdeps.py [--root ] + + --root ROOT Specifies the repository root. This defaults to "../../.." + relative to the script file. This will be correct given the +@@ -280,7 +280,7 @@ Examples: + --excl='.*->third_party' \ + --fanin='^(apps|content/browser/renderer_host)$' \ + --ignore-specific-rules \ +- --ignore-temp-rules""" ++ --ignore-temp-rules""") + + + def main(): +@@ -392,11 +392,11 @@ def main(): + PrintUsage() + return 1 + +- print 'Using base directory: ', deps_grapher.base_directory +- print 'include nodes : ', options.incl +- print 'exclude nodes : ', options.excl +- print 'highlight fanins of : ', options.hilite_fanins +- print 'highlight fanouts of: ', options.hilite_fanouts ++ print('Using base directory: ', deps_grapher.base_directory) ++ print('include nodes : ', options.incl) ++ print('exclude nodes : ', options.excl) ++ print('highlight fanins of : ', options.hilite_fanins) ++ print('highlight fanouts of: ', options.hilite_fanouts) + + deps_grapher.DumpDependencies() + return 0 +diff --git a/src/3rdparty/chromium/buildtools/checkdeps/java_checker.py b/src/3rdparty/chromium/buildtools/checkdeps/java_checker.py +index a5b1db73f..8a8e2dd7d 100644 +--- a/src/3rdparty/chromium/buildtools/checkdeps/java_checker.py ++++ b/src/3rdparty/chromium/buildtools/checkdeps/java_checker.py +@@ 
-112,21 +112,21 @@ class JavaChecker(object): + + def _PrescanFile(self, filepath, added_classset): + if self._verbose: +- print 'Prescanning: ' + filepath ++ print('Prescanning: ' + filepath) + full_class_name = self._GetClassFullName(filepath) + if full_class_name: + if full_class_name in self._classmap: + if self._verbose or full_class_name in added_classset: + if not any(re.match(i, filepath) for i in + self._allow_multiple_definitions): +- print 'WARNING: multiple definitions of %s:' % full_class_name +- print ' ' + filepath +- print ' ' + self._classmap[full_class_name] +- print ++ print('WARNING: multiple definitions of %s:' % full_class_name) ++ print(' ' + filepath) ++ print(' ' + self._classmap[full_class_name]) ++ print() + else: + self._classmap[full_class_name] = filepath + elif self._verbose: +- print 'WARNING: no package definition found in %s' % filepath ++ print('WARNING: no package definition found in %s' % filepath) + + def CheckLine(self, rules, line, filepath, fail_on_temp_allow=False): + """Checks the given line with the given rule set. +@@ -157,7 +157,7 @@ class JavaChecker(object): + + def CheckFile(self, rules, filepath): + if self._verbose: +- print 'Checking: ' + filepath ++ print('Checking: ' + filepath) + + dependee_status = results.DependeeStatus(filepath) + with codecs.open(filepath, encoding='utf-8') as f: +diff --git a/src/3rdparty/chromium/buildtools/checkdeps/proto_checker.py b/src/3rdparty/chromium/buildtools/checkdeps/proto_checker.py +index a90628a6c..676d0fe42 100644 +--- a/src/3rdparty/chromium/buildtools/checkdeps/proto_checker.py ++++ b/src/3rdparty/chromium/buildtools/checkdeps/proto_checker.py +@@ -67,7 +67,7 @@ class ProtoChecker(object): + # Don't fail when no directory is specified. We may want to be more + # strict about this in the future. 
+ if self._verbose: +- print ' WARNING: import specified with no directory: ' + import_path ++ print(' WARNING: import specified with no directory: ' + import_path) + return True, None + + if self._resolve_dotdot and '../' in import_path: +@@ -87,7 +87,7 @@ class ProtoChecker(object): + + def CheckFile(self, rules, filepath): + if self._verbose: +- print 'Checking: ' + filepath ++ print('Checking: ' + filepath) + + dependee_status = results.DependeeStatus(filepath) + last_import = 0 +diff --git a/src/3rdparty/chromium/buildtools/checkdeps/results.py b/src/3rdparty/chromium/buildtools/checkdeps/results.py +index b52880ccc..e21beba3b 100644 +--- a/src/3rdparty/chromium/buildtools/checkdeps/results.py ++++ b/src/3rdparty/chromium/buildtools/checkdeps/results.py +@@ -96,9 +96,9 @@ class NormalResultsFormatter(ResultsFormatter): + + def PrintResults(self): + for result in self.results: +- print result ++ print(result) + if self.results: +- print '\nFAILED\n' ++ print('\nFAILED\n') + + + class JSONResultsFormatter(ResultsFormatter): +@@ -133,7 +133,7 @@ class JSONResultsFormatter(ResultsFormatter): + self.wrapped_formatter.PrintResults() + return + +- print self.results ++ print(self.results) + + + class TemporaryRulesFormatter(ResultsFormatter): +@@ -154,7 +154,7 @@ class TemporaryRulesFormatter(ResultsFormatter): + + def PrintResults(self): + for result in self.GetResults(): +- print result ++ print(result) + + + class CountViolationsFormatter(ResultsFormatter): +@@ -175,4 +175,4 @@ class CountViolationsFormatter(ResultsFormatter): + return '%d' % self.count + + def PrintResults(self): +- print self.count ++ print(self.count) +diff --git a/src/3rdparty/chromium/buildtools/checkdeps/rules.py b/src/3rdparty/chromium/buildtools/checkdeps/rules.py +index dd3884db2..583f90afd 100644 +--- a/src/3rdparty/chromium/buildtools/checkdeps/rules.py ++++ b/src/3rdparty/chromium/buildtools/checkdeps/rules.py +@@ -112,7 +112,7 @@ class Rules(object): + def __str__(self): + result = 
['Rules = {\n (apply to all files): [\n%s\n ],' % '\n'.join( + ' %s' % x for x in self._general_rules)] +- for regexp, rules in self._specific_rules.iteritems(): ++ for regexp, rules in self._specific_rules.items(): + result.append(' (limited to files matching %s): [\n%s\n ]' % ( + regexp, '\n'.join(' %s' % x for x in rules))) + result.append(' }') +@@ -132,7 +132,7 @@ class Rules(object): + if include_general_rules: + AddDependencyTuplesImpl(deps, self._general_rules) + if include_specific_rules: +- for regexp, rules in self._specific_rules.iteritems(): ++ for regexp, rules in self._specific_rules.items(): + AddDependencyTuplesImpl(deps, rules, "/" + regexp) + return deps + +@@ -175,7 +175,7 @@ class Rules(object): + file located at |dependee_path|. + """ + dependee_filename = os.path.basename(dependee_path) +- for regexp, specific_rules in self._specific_rules.iteritems(): ++ for regexp, specific_rules in self._specific_rules.items(): + if re.match(regexp, dependee_filename): + for rule in specific_rules: + if rule.ChildOrMatch(include_path): +diff --git a/src/3rdparty/chromium/buildtools/ensure_gn_version.py b/src/3rdparty/chromium/buildtools/ensure_gn_version.py +index cfc75a377..7570bf353 100755 +--- a/src/3rdparty/chromium/buildtools/ensure_gn_version.py ++++ b/src/3rdparty/chromium/buildtools/ensure_gn_version.py +@@ -15,7 +15,7 @@ until we have a proper fix in place. + TODO(crbug.com/944667): remove this script when it is no longer needed. 
+ """ + +-from __future__ import print_function ++ + + import argparse + import errno +@@ -97,7 +97,7 @@ def main(): + url = 'https://chrome-infra-packages.appspot.com/dl/gn/gn/%s/+/%s' % ( + platform, args.version) + try: +- zipdata = urllib.urlopen(url).read() ++ zipdata = urllib.request.urlopen(url).read() + except urllib.HTTPError as e: + print('Failed to download the package from %s: %d %s' % ( + url, e.code, e.reason)) +diff --git a/src/3rdparty/chromium/chrome/browser/resources/optimize_webui.py b/src/3rdparty/chromium/chrome/browser/resources/optimize_webui.py +index 00da92471..9ca3c8026 100755 +--- a/src/3rdparty/chromium/chrome/browser/resources/optimize_webui.py ++++ b/src/3rdparty/chromium/chrome/browser/resources/optimize_webui.py +@@ -108,8 +108,7 @@ for (redirect_url, file_path) in [ + _URL_MAPPINGS.append(('//' + redirect_url, file_path)) + + +-_VULCANIZE_REDIRECT_ARGS = list(itertools.chain.from_iterable(map( +- lambda m: ['--redirect', '%s|%s' % (m[0], m[1])], _URL_MAPPINGS))) ++_VULCANIZE_REDIRECT_ARGS = list(itertools.chain.from_iterable([['--redirect', '%s|%s' % (m[0], m[1])] for m in _URL_MAPPINGS])) + + + def _undo_mapping(mappings, url): +@@ -134,9 +133,7 @@ def _update_dep_file(in_folder, args, manifest): + + # Add a slash in front of every dependency that is not a chrome:// URL, so + # that we can map it to the correct source file path below. +- request_list = map( +- lambda dep: '/' + dep if not (dep.startswith('chrome://') or dep.startswith('//')) else dep, +- request_list) ++ request_list = ['/' + dep if not (dep.startswith('chrome://') or dep.startswith('//')) else dep for dep in request_list] + + # Undo the URL mappings applied by vulcanize to get file paths relative to + # current working directory. 
+@@ -146,7 +143,7 @@ def _update_dep_file(in_folder, args, manifest): + ] + + deps = [_undo_mapping(url_mappings, u) for u in request_list] +- deps = map(os.path.normpath, deps) ++ deps = list(map(os.path.normpath, deps)) + + # If the input was a folder holding an unpacked .pak file, the generated + # depfile should not list files already in the .pak file. +diff --git a/src/3rdparty/chromium/chrome/browser/resources/unpack_pak.py b/src/3rdparty/chromium/chrome/browser/resources/unpack_pak.py +index 24b1d5f29..d7ba057c0 100755 +--- a/src/3rdparty/chromium/chrome/browser/resources/unpack_pak.py ++++ b/src/3rdparty/chromium/chrome/browser/resources/unpack_pak.py +@@ -77,7 +77,7 @@ def Unpack(pak_path, out_path, pak_base_dir, excludes): + + root_dir = pak_base_dir if pak_base_dir else pak_dir + # Extract packed files, while preserving directory structure. +- for (resource_id, text) in data.resources.iteritems(): ++ for (resource_id, text) in data.resources.items(): + UnpackResource(root_dir, out_path, excludes or [], + resource_filenames[resource_ids[resource_id]], text) + +diff --git a/src/3rdparty/chromium/chrome/browser/resources/vr/assets/PRESUBMIT.py b/src/3rdparty/chromium/chrome/browser/resources/vr/assets/PRESUBMIT.py +index 055131e62..6678ba2bb 100644 +--- a/src/3rdparty/chromium/chrome/browser/resources/vr/assets/PRESUBMIT.py ++++ b/src/3rdparty/chromium/chrome/browser/resources/vr/assets/PRESUBMIT.py +@@ -61,7 +61,7 @@ def CheckVersionAndAssetParity(input_api, output_api): + return [ + output_api.PresubmitError( + 'Must have same asset files for %s in \'%s\'.' 
% +- (changed_asset_files.keys(), ++ (list(changed_asset_files.keys()), + input_api.os_path.dirname( + input_api.AffectedFiles()[0].LocalPath()))) + ] +diff --git a/src/3rdparty/chromium/chrome/browser/resources/vr/assets/push_assets_component.py b/src/3rdparty/chromium/chrome/browser/resources/vr/assets/push_assets_component.py +index b47f275ad..db8c7abb9 100755 +--- a/src/3rdparty/chromium/chrome/browser/resources/vr/assets/push_assets_component.py ++++ b/src/3rdparty/chromium/chrome/browser/resources/vr/assets/push_assets_component.py +@@ -35,7 +35,7 @@ class TempDir(): + + def PrintInfo(header, items): + print('\n%s' % header) +- print ' ', '\n '.join(items) ++ print(' ', '\n '.join(items)) + + + def main(): +@@ -76,8 +76,8 @@ def main(): + PrintInfo('Which pushes the following file', [zip_path]) + PrintInfo('Which contains the files', zip_files) + +- if raw_input('\nAre you sure (y/N) ').lower() != 'y': +- print 'aborting' ++ if input('\nAre you sure (y/N) ').lower() != 'y': ++ print('aborting') + return 1 + return subprocess.call(command, cwd=temp_dir) + +diff --git a/src/3rdparty/chromium/components/assist_ranker/print_example_preprocessor_config.py b/src/3rdparty/chromium/components/assist_ranker/print_example_preprocessor_config.py +index 5e35a0f1a..581ec7360 100755 +--- a/src/3rdparty/chromium/components/assist_ranker/print_example_preprocessor_config.py ++++ b/src/3rdparty/chromium/components/assist_ranker/print_example_preprocessor_config.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python2 ++#!/usr/bin/env python3 + + # Copyright 2018 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +@@ -61,31 +61,31 @@ def PrintExamplePreprocessorConfig(pb_file): + # Indent description by a tab and wrap text. + max_len = 80 - 8 # Leave at least 8 columns for tab width. 
+ description += ('\n\t').join(textwrap.wrap(bucket_str, max_len)) +- print description ++ print(description) + return 0 + + + def Main(args): + if len(args) != 2: +- print 'Usage: %s ' % ( +- __file__) ++ print('Usage: %s ' % ( ++ __file__)) + return 1 + + out_dir = args[0] + if not os.path.isdir(out_dir): +- print 'Could not find out directory: %s' % out_dir ++ print('Could not find out directory: %s' % out_dir) + return 1 + + pb_file = args[1] + if not os.path.isfile(pb_file): +- print 'Protobuf file not found: %s' % pb_file ++ print('Protobuf file not found: %s' % pb_file) + return 1 + + proto_dir = os.path.join(out_dir, 'pyproto/components/assist_ranker/proto') + if not os.path.isdir(proto_dir): +- print 'Proto directory not found: %s' % proto_dir +- print 'Build the "components/assist_ranker/proto" target' +- print ' (usually built with chrome)' ++ print('Proto directory not found: %s' % proto_dir) ++ print('Build the "components/assist_ranker/proto" target') ++ print(' (usually built with chrome)') + return 1 + + # Allow importing the ExamplePreprocessorConfig proto definition. 
+diff --git a/src/3rdparty/chromium/components/autofill_assistant/browser/devtools/devtools_api/client_api_generator.py b/src/3rdparty/chromium/components/autofill_assistant/browser/devtools/devtools_api/client_api_generator.py +index 853b5b924..72fed7492 100644 +--- a/src/3rdparty/chromium/components/autofill_assistant/browser/devtools/devtools_api/client_api_generator.py ++++ b/src/3rdparty/chromium/components/autofill_assistant/browser/devtools/devtools_api/client_api_generator.py +@@ -435,7 +435,7 @@ def InitializeDomainDependencies(json_api): + + if not isinstance(json, dict): + return +- for value in json.itervalues(): ++ for value in json.values(): + GetDomainDepsFromRefs(domain_name, value) + + if '$ref' in json: +diff --git a/src/3rdparty/chromium/components/certificate_transparency/tools/make_ct_known_logs_list.py b/src/3rdparty/chromium/components/certificate_transparency/tools/make_ct_known_logs_list.py +index f1563dbe4..1a25c1e75 100755 +--- a/src/3rdparty/chromium/components/certificate_transparency/tools/make_ct_known_logs_list.py ++++ b/src/3rdparty/chromium/components/certificate_transparency/tools/make_ct_known_logs_list.py +@@ -83,7 +83,7 @@ def _get_log_ids_for_operator(logs_by_operator, operator_name): + + def _is_log_disqualified(log): + # Disqualified logs are denoted with state="retired" +- assert (len(log.get("state").keys()) == 1) ++ assert (len(list(log.get("state").keys())) == 1) + log_state = list(log.get("state"))[0] + return log_state == "retired" + +@@ -157,7 +157,7 @@ def _write_qualifying_logs_loginfo(f, qualifying_logs): + + + def _is_log_once_or_currently_qualified(log): +- assert (len(log.get("state").keys()) == 1) ++ assert (len(list(log.get("state").keys())) == 1) + return list(log.get("state"))[0] not in ("pending", "rejected") + + +diff --git a/src/3rdparty/chromium/components/certificate_transparency/tools/make_ct_known_logs_list_unittest.py 
b/src/3rdparty/chromium/components/certificate_transparency/tools/make_ct_known_logs_list_unittest.py +index db94cf4f3..c2c5e833d 100755 +--- a/src/3rdparty/chromium/components/certificate_transparency/tools/make_ct_known_logs_list_unittest.py ++++ b/src/3rdparty/chromium/components/certificate_transparency/tools/make_ct_known_logs_list_unittest.py +@@ -17,7 +17,7 @@ def b64e(x): + class FormattingTest(unittest.TestCase): + + def testSplitAndHexifyBinData(self): +- bin_data = bytes(bytearray(range(32, 60))) ++ bin_data = bytes(bytearray(list(range(32, 60)))) + expected_encoded_array = [ + ('"\\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\\x29\\x2a' + '\\x2b\\x2c\\x2d\\x2e\\x2f\\x30"'), +@@ -28,7 +28,7 @@ class FormattingTest(unittest.TestCase): + expected_encoded_array) + + # This data should fit in exactly one line - 17 bytes. +- short_bin_data = bytes(bytearray(range(32, 49))) ++ short_bin_data = bytes(bytearray(list(range(32, 49)))) + expected_short_array = [ + ('"\\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\\x29\\x2a' + '\\x2b\\x2c\\x2d\\x2e\\x2f\\x30"') +@@ -38,7 +38,7 @@ class FormattingTest(unittest.TestCase): + expected_short_array) + + # This data should fit exactly in two lines - 34 bytes. 
+- two_line_data = bytes(bytearray(range(32, 66))) ++ two_line_data = bytes(bytearray(list(range(32, 66)))) + expected_two_line_data_array = [ + ('"\\x20\\x21\\x22\\x23\\x24\\x25\\x26\\x27\\x28\\x29\\x2a' + '\\x2b\\x2c\\x2d\\x2e\\x2f\\x30"'), +diff --git a/src/3rdparty/chromium/components/crash/content/tools/dmp2minidump.py b/src/3rdparty/chromium/components/crash/content/tools/dmp2minidump.py +index 7823d4836..c92ea1eea 100755 +--- a/src/3rdparty/chromium/components/crash/content/tools/dmp2minidump.py ++++ b/src/3rdparty/chromium/components/crash/content/tools/dmp2minidump.py +@@ -25,11 +25,11 @@ def ProcessDump(dump_file, minidump_file): + boundary = dump.readline().strip()[2:] + data = parse_multipart(dump, {'boundary': boundary}) + except: +- print 'Failed to read dmp file %s' % dump_file ++ print('Failed to read dmp file %s' % dump_file) + return + + if not 'upload_file_minidump' in data: +- print 'Could not find minidump file in dump.' ++ print('Could not find minidump file in dump.') + return + + f = open(minidump_file, 'w') +@@ -39,9 +39,9 @@ def ProcessDump(dump_file, minidump_file): + + def main(): + if len(sys.argv) != 3: +- print 'Usage: %s [dmp file] [minidump]' % sys.argv[0] +- print '' +- print 'Extracts the minidump stored in the crash dump file' ++ print('Usage: %s [dmp file] [minidump]' % sys.argv[0]) ++ print('') ++ print('Extracts the minidump stored in the crash dump file') + return 1 + + ProcessDump(sys.argv[1], sys.argv[2]) +diff --git a/src/3rdparty/chromium/components/crash/content/tools/generate_breakpad_symbols.py b/src/3rdparty/chromium/components/crash/content/tools/generate_breakpad_symbols.py +index dcf26b720..1f30349cf 100755 +--- a/src/3rdparty/chromium/components/crash/content/tools/generate_breakpad_symbols.py ++++ b/src/3rdparty/chromium/components/crash/content/tools/generate_breakpad_symbols.py +@@ -15,7 +15,7 @@ import glob + import multiprocessing + import optparse + import os +-import Queue ++import queue + import re + 
import shutil + import subprocess +@@ -35,7 +35,7 @@ def GetDumpSymsBinary(build_dir=None): + DUMP_SYMS = 'dump_syms' + dump_syms_bin = os.path.join(os.path.expanduser(build_dir), DUMP_SYMS) + if not os.access(dump_syms_bin, os.X_OK): +- print 'Cannot find %s.' % dump_syms_bin ++ print('Cannot find %s.' % dump_syms_bin) + return None + + return dump_syms_bin +@@ -129,7 +129,7 @@ def GetDeveloperDirMac(): + for path in candidate_paths: + if os.path.exists(path): + return path +- print 'WARNING: no value found for DEVELOPER_DIR. Some commands may fail.' ++ print('WARNING: no value found for DEVELOPER_DIR. Some commands may fail.') + + + def GetSharedLibraryDependenciesMac(binary, exe_path): +@@ -195,10 +195,10 @@ def GetSharedLibraryDependenciesMac(binary, exe_path): + if dep: + deps.append(os.path.normpath(dep)) + else: +- print >>sys.stderr, ( ++ print(( + 'ERROR: failed to resolve %s, exe_path %s, loader_path %s, ' + 'rpaths %s' % (m.group(1), exe_path, loader_path, +- ', '.join(rpaths))) ++ ', '.join(rpaths))), file=sys.stderr) + sys.exit(1) + return deps + +@@ -223,7 +223,7 @@ def GetSharedLibraryDependencies(options, binary, exe_path): + elif options.platform == 'chromeos': + deps = GetSharedLibraryDependenciesChromeOS(binary) + else: +- print "Platform not supported." ++ print("Platform not supported.") + sys.exit(1) + + result = [] +@@ -255,7 +255,7 @@ def GetTransitiveDependencies(options): + binaries |= new_deps + queue.extend(list(new_deps)) + return binaries +- print "Platform not supported." 
++ print("Platform not supported.") + sys.exit(1) + + +@@ -293,7 +293,7 @@ def CreateSymbolDir(options, output_dir, relative_hash_dir): + def GenerateSymbols(options, binaries): + """Dumps the symbols of binary and places them in the given directory.""" + +- queue = Queue.Queue() ++ queue = queue.Queue() + print_lock = threading.Lock() + + def _Worker(): +@@ -343,22 +343,22 @@ def GenerateSymbols(options, binaries): + if not should_dump_syms: + if options.verbose: + with print_lock: +- print "Skipping %s (%s)" % (binary, reason) ++ print("Skipping %s (%s)" % (binary, reason)) + queue.task_done() + continue + + if options.verbose: + with print_lock: +- print "Generating symbols for %s" % binary ++ print("Generating symbols for %s" % binary) + + CreateSymbolDir(options, output_dir, binary_info.hash) + try: + with open(output_path, 'wb') as f: + subprocess.check_call([dump_syms, '-r', binary], stdout=f) +- except Exception, e: ++ except Exception as e: + # Not much we can do about this. + with print_lock: +- print e ++ print(e) + + queue.task_done() + +@@ -394,19 +394,19 @@ def main(): + (options, _) = parser.parse_args() + + if not options.symbols_dir: +- print "Required option --symbols-dir missing." ++ print("Required option --symbols-dir missing.") + return 1 + + if not options.build_dir: +- print "Required option --build-dir missing." ++ print("Required option --build-dir missing.") + return 1 + + if not options.binary: +- print "Required option --binary missing." ++ print("Required option --binary missing.") + return 1 + + if not os.access(options.binary, os.X_OK): +- print "Cannot find %s." % options.binary ++ print("Cannot find %s." 
% options.binary) + return 1 + + if options.clear: +diff --git a/src/3rdparty/chromium/components/domain_reliability/bake_in_configs.py b/src/3rdparty/chromium/components/domain_reliability/bake_in_configs.py +index cf2e8b483..0828ac39d 100755 +--- a/src/3rdparty/chromium/components/domain_reliability/bake_in_configs.py ++++ b/src/3rdparty/chromium/components/domain_reliability/bake_in_configs.py +@@ -8,7 +8,7 @@ + encodes their contents as an array of C strings that gets compiled in to Chrome + and loaded at runtime.""" + +-from __future__ import print_function ++ + + import ast + import json +diff --git a/src/3rdparty/chromium/components/feed/core/v2/tools/protoc_util.py b/src/3rdparty/chromium/components/feed/core/v2/tools/protoc_util.py +index 2d2ce70da..a86d9583f 100755 +--- a/src/3rdparty/chromium/components/feed/core/v2/tools/protoc_util.py ++++ b/src/3rdparty/chromium/components/feed/core/v2/tools/protoc_util.py +@@ -9,6 +9,7 @@ + import glob + import os + import subprocess ++import sys + + _protoc_path = None + +diff --git a/src/3rdparty/chromium/components/feed/core/v2/tools/stream_dump.py b/src/3rdparty/chromium/components/feed/core/v2/tools/stream_dump.py +index c6a0040bb..ce2eab6bc 100755 +--- a/src/3rdparty/chromium/components/feed/core/v2/tools/stream_dump.py ++++ b/src/3rdparty/chromium/components/feed/core/v2/tools/stream_dump.py +@@ -18,7 +18,7 @@ import argparse + import glob + import os + import plyvel +-import protoc_util ++from . 
import protoc_util + import re + import subprocess + import sys +diff --git a/src/3rdparty/chromium/components/feed/core/v2/tools/textpb_to_binarypb.py b/src/3rdparty/chromium/components/feed/core/v2/tools/textpb_to_binarypb.py +index ef07d256c..856a15a91 100755 +--- a/src/3rdparty/chromium/components/feed/core/v2/tools/textpb_to_binarypb.py ++++ b/src/3rdparty/chromium/components/feed/core/v2/tools/textpb_to_binarypb.py +@@ -18,7 +18,7 @@ Usage example: + import base64 + import glob + import os +-import protoc_util ++from . import protoc_util + import subprocess + import sys + import urllib.parse +diff --git a/src/3rdparty/chromium/components/feed/tools/content_dump.py b/src/3rdparty/chromium/components/feed/tools/content_dump.py +index 91d5cdee0..6dd33b51a 100755 +--- a/src/3rdparty/chromium/components/feed/tools/content_dump.py ++++ b/src/3rdparty/chromium/components/feed/tools/content_dump.py +@@ -18,7 +18,7 @@ import argparse + import glob + import os + import plyvel +-import protoc_util ++from . import protoc_util + import re + import subprocess + import sys +diff --git a/src/3rdparty/chromium/components/feed/tools/mockserver_textpb_to_binary.py b/src/3rdparty/chromium/components/feed/tools/mockserver_textpb_to_binary.py +index b6d75745b..e0b02396b 100755 +--- a/src/3rdparty/chromium/components/feed/tools/mockserver_textpb_to_binary.py ++++ b/src/3rdparty/chromium/components/feed/tools/mockserver_textpb_to_binary.py +@@ -23,7 +23,7 @@ Usage example: + + import glob + import os +-import protoc_util ++from . 
import protoc_util + import subprocess + + from absl import app +diff --git a/src/3rdparty/chromium/components/feed/tools/protoc_util.py b/src/3rdparty/chromium/components/feed/tools/protoc_util.py +index 0ff20f2c0..fa7bba00b 100755 +--- a/src/3rdparty/chromium/components/feed/tools/protoc_util.py ++++ b/src/3rdparty/chromium/components/feed/tools/protoc_util.py +@@ -9,6 +9,7 @@ + import glob + import os + import subprocess ++import sys + + _protoc_path = None + +diff --git a/src/3rdparty/chromium/components/ntp_snippets/remote/fetch.py b/src/3rdparty/chromium/components/ntp_snippets/remote/fetch.py +index d5624c050..c9833fdf5 100755 +--- a/src/3rdparty/chromium/components/ntp_snippets/remote/fetch.py ++++ b/src/3rdparty/chromium/components/ntp_snippets/remote/fetch.py +@@ -15,7 +15,7 @@ If getting signed-in results, authenticates with OAuth2 and stores the + credentials at ~/.zineauth. + """ + +-from __future__ import absolute_import, division, print_function, unicode_literals ++ + + import argparse + import base64 +diff --git a/src/3rdparty/chromium/components/resources/protobufs/binary_proto_generator.py b/src/3rdparty/chromium/components/resources/protobufs/binary_proto_generator.py +index 7422ead96..fcf10080a 100755 +--- a/src/3rdparty/chromium/components/resources/protobufs/binary_proto_generator.py ++++ b/src/3rdparty/chromium/components/resources/protobufs/binary_proto_generator.py +@@ -8,6 +8,7 @@ + + """ + ++from __future__ import print_function + import abc + import imp + import optparse +@@ -196,12 +197,12 @@ class BinaryProtoGenerator: + self._ImportProtoModules(opts.path) + + if not self.VerifyArgs(opts): +- print "Wrong arguments" ++ print("Wrong arguments") + return 1 + + try: + self._GenerateBinaryProtos(opts) + except Exception as e: +- print "ERROR: Failed to render binary version of %s:\n %s\n%s" % ( +- opts.infile, str(e), traceback.format_exc()) ++ print("ERROR: Failed to render binary version of %s:\n %s\n%s" % ( ++ opts.infile, str(e), 
traceback.format_exc())) + return 1 +diff --git a/src/3rdparty/chromium/components/resources/ssl/ssl_error_assistant/push_proto.py b/src/3rdparty/chromium/components/resources/ssl/ssl_error_assistant/push_proto.py +index d2d3824d1..2634d844c 100755 +--- a/src/3rdparty/chromium/components/resources/ssl/ssl_error_assistant/push_proto.py ++++ b/src/3rdparty/chromium/components/resources/ssl/ssl_error_assistant/push_proto.py +@@ -46,10 +46,10 @@ def main(): + gn_command = ['ninja', + '-C', opts.dir, + RESOURCE_SUBDIR + ':make_ssl_error_assistant_protobuf'] +- print "Running the following" +- print " " + (' '.join(gn_command)) ++ print("Running the following") ++ print(" " + (' '.join(gn_command))) + if subprocess.call(gn_command): +- print "Ninja failed." ++ print("Ninja failed.") + return 1 + + # Use the versioned files under the copy directory to push to the GCS bucket. +@@ -68,19 +68,19 @@ def main(): + version_dir = dirs[0] + command = ['gsutil', 'cp', '-Rn', version_dir, DEST_BUCKET] + +- print '\nGoing to run the following command' +- print ' ', ' '.join(command) +- print '\nIn directory' +- print ' ', copy_dir +- print '\nWhich should push the following files' ++ print('\nGoing to run the following command') ++ print(' ', ' '.join(command)) ++ print('\nIn directory') ++ print(' ', copy_dir) ++ print('\nWhich should push the following files') + expected_files = [os.path.join(dp, f) for dp, _, fn in + os.walk(version_dir) for f in fn] + for f in expected_files: +- print ' ', f ++ print(' ', f) + +- shall = raw_input('\nAre you sure (y/N) ').lower() == 'y' ++ shall = input('\nAre you sure (y/N) ').lower() == 'y' + if not shall: +- print 'aborting' ++ print('aborting') + return 1 + return subprocess.call(command) + +diff --git a/src/3rdparty/chromium/components/safe_browsing/core/resources/gen_file_type_proto.py b/src/3rdparty/chromium/components/safe_browsing/core/resources/gen_file_type_proto.py +index 15de89899..0e4305ed2 100755 +--- 
a/src/3rdparty/chromium/components/safe_browsing/core/resources/gen_file_type_proto.py ++++ b/src/3rdparty/chromium/components/safe_browsing/core/resources/gen_file_type_proto.py +@@ -154,7 +154,7 @@ class DownloadFileTypeProtoGenerator(BinaryProtoGenerator): + FilterForPlatformAndWrite(pb, platform_enum, outfile) + else: + # Make a separate file for each platform +- for platform_type, platform_enum in PlatformTypes().iteritems(): ++ for platform_type, platform_enum in PlatformTypes().items(): + # e.g. .../all/77/chromeos/download_file_types.pb + outfile = os.path.join(opts.outdir, + str(pb.version_id), +@@ -179,12 +179,12 @@ class DownloadFileTypeProtoGenerator(BinaryProtoGenerator): + + def VerifyArgs(self, opts): + if (not opts.all and opts.type not in PlatformTypes()): +- print "ERROR: Unknown platform type '%s'" % opts.type ++ print("ERROR: Unknown platform type '%s'" % opts.type) + self.opt_parser.print_help() + return False + + if (bool(opts.all) == bool(opts.type)): +- print "ERROR: Need exactly one of --type or --all" ++ print("ERROR: Need exactly one of --type or --all") + self.opt_parser.print_help() + return False + return True +diff --git a/src/3rdparty/chromium/components/safe_browsing/core/resources/push_file_type_proto.py b/src/3rdparty/chromium/components/safe_browsing/core/resources/push_file_type_proto.py +index 4f9bf243c..b01fa3144 100755 +--- a/src/3rdparty/chromium/components/safe_browsing/core/resources/push_file_type_proto.py ++++ b/src/3rdparty/chromium/components/safe_browsing/core/resources/push_file_type_proto.py +@@ -40,10 +40,10 @@ def main(): + gn_command = ['ninja', + '-C', opts.dir, + RESOURCE_SUBDIR + ':make_all_file_types_protobuf'] +- print "Running the following" +- print " " + (' '.join(gn_command)) ++ print("Running the following") ++ print(" " + (' '.join(gn_command))) + if subprocess.call(gn_command): +- print "Ninja failed." 
++ print("Ninja failed.") + return 1 + + os.chdir(all_dir) +@@ -60,19 +60,19 @@ def main(): + vers_dir = dirs[0] + command = ['gsutil', 'cp', '-Rn', vers_dir, DEST_BUCKET] + +- print '\nGoing to run the following command' +- print ' ', ' '.join(command) +- print '\nIn directory' +- print ' ', all_dir +- print '\nWhich should push the following files' ++ print('\nGoing to run the following command') ++ print(' ', ' '.join(command)) ++ print('\nIn directory') ++ print(' ', all_dir) ++ print('\nWhich should push the following files') + expected_files = [os.path.join(dp, f) for dp, dn, fn in + os.walk(vers_dir) for f in fn] + for f in expected_files: +- print ' ', f ++ print(' ', f) + +- shall = raw_input('\nAre you sure (y/N) ').lower() == 'y' ++ shall = input('\nAre you sure (y/N) ').lower() == 'y' + if not shall: +- print 'aborting' ++ print('aborting') + return 1 + return subprocess.call(command) + +diff --git a/src/3rdparty/chromium/components/schema_org/generate_schema_org_code.py b/src/3rdparty/chromium/components/schema_org/generate_schema_org_code.py +index b53e6abe3..71f820b7b 100644 +--- a/src/3rdparty/chromium/components/schema_org/generate_schema_org_code.py ++++ b/src/3rdparty/chromium/components/schema_org/generate_schema_org_code.py +@@ -225,7 +225,7 @@ def get_template_vars(schema, names): + template_vars['properties'].append( + parse_property(thing, schema, names)) + +- for entity, parents in entity_parent_lookup.iteritems(): ++ for entity, parents in entity_parent_lookup.items(): + template_vars['entity_parent_lookup'].append({ + 'name': + entity, +diff --git a/src/3rdparty/chromium/components/variations/service/generate_ui_string_overrider.py b/src/3rdparty/chromium/components/variations/service/generate_ui_string_overrider.py +index 859b758ef..e83e1d7a8 100755 +--- a/src/3rdparty/chromium/components/variations/service/generate_ui_string_overrider.py ++++ b/src/3rdparty/chromium/components/variations/service/generate_ui_string_overrider.py +@@ 
-101,7 +101,7 @@ def _CheckForHashCollisions(sorted_resource_list): + A set of all |Resource| objects with collisions. + """ + collisions = set() +- for i in xrange(len(sorted_resource_list) - 1): ++ for i in range(len(sorted_resource_list) - 1): + resource = sorted_resource_list[i] + next_resource = sorted_resource_list[i+1] + if resource.hash == next_resource.hash: +diff --git a/src/3rdparty/chromium/components/vector_icons/aggregate_vector_icons.py b/src/3rdparty/chromium/components/vector_icons/aggregate_vector_icons.py +index 7239690c4..dbe60ad7b 100644 +--- a/src/3rdparty/chromium/components/vector_icons/aggregate_vector_icons.py ++++ b/src/3rdparty/chromium/components/vector_icons/aggregate_vector_icons.py +@@ -18,7 +18,7 @@ TEMPLATE_PLACEHOLDER = "TEMPLATE_PLACEHOLDER" + + + def Error(msg): +- print >> sys.stderr, msg ++ print(msg, file=sys.stderr) + sys.exit(1) + + +diff --git a/src/3rdparty/chromium/components/viz/service/display/process_renderer_perftest_results.py b/src/3rdparty/chromium/components/viz/service/display/process_renderer_perftest_results.py +index 588dece3a..c77390081 100755 +--- a/src/3rdparty/chromium/components/viz/service/display/process_renderer_perftest_results.py ++++ b/src/3rdparty/chromium/components/viz/service/display/process_renderer_perftest_results.py +@@ -20,7 +20,7 @@ import sys + def SaveResultsAsCSV(csv_data, csv_filename): + assert len(csv_data) > 0 + with open(csv_filename, 'wb') as csv_file: +- labels = sorted(csv_data[0].keys(), reverse=True) ++ labels = sorted(list(csv_data[0].keys()), reverse=True) + writer = csv.DictWriter(csv_file, fieldnames=labels) + writer.writeheader() + writer.writerows(csv_data) +diff --git a/src/3rdparty/chromium/content/browser/tracing/generate_trace_viewer_grd.py b/src/3rdparty/chromium/content/browser/tracing/generate_trace_viewer_grd.py +index 037f9497d..be393d21f 100755 +--- a/src/3rdparty/chromium/content/browser/tracing/generate_trace_viewer_grd.py ++++ 
b/src/3rdparty/chromium/content/browser/tracing/generate_trace_viewer_grd.py +@@ -74,7 +74,7 @@ def main(argv): + for filename in parsed_args.source_files: + add_file_to_grd(doc, os.path.basename(filename)) + +- with open(parsed_args.output_filename, 'w') as output_file: ++ with open(parsed_args.output_filename, 'wb') as output_file: + output_file.write(doc.toxml(encoding='UTF-8')) + + +diff --git a/src/3rdparty/chromium/content/public/android/generate_child_service.py b/src/3rdparty/chromium/content/public/android/generate_child_service.py +index 8c28dd624..6c2765b13 100755 +--- a/src/3rdparty/chromium/content/public/android/generate_child_service.py ++++ b/src/3rdparty/chromium/content/public/android/generate_child_service.py +@@ -46,7 +46,7 @@ def DoMain(argv): + path_template = "org/chromium/content/app/SandboxedProcessService{0}.java" + with build_utils.AtomicOutput(output) as f: + with zipfile.ZipFile(f, 'w', zipfile.ZIP_STORED) as srcjar: +- for i in xrange(number): ++ for i in range(number): + build_utils.AddToZipHermetic(srcjar, + path_template.format(i), + data=GenerateService(i)) +diff --git a/src/3rdparty/chromium/docs/enterprise/extension_query.py b/src/3rdparty/chromium/docs/enterprise/extension_query.py +index 41c92623b..b5f99bb25 100755 +--- a/src/3rdparty/chromium/docs/enterprise/extension_query.py ++++ b/src/3rdparty/chromium/docs/enterprise/extension_query.py +@@ -4,8 +4,8 @@ + # found in the LICENSE file. + """Transform CBCM Takeout API Data (Python3).""" + +-from __future__ import print_function +-from __future__ import unicode_literals ++ ++ + + import argparse + import csv +@@ -81,7 +81,7 @@ def DictToList(data, key_name='id'): + The values from |data|, with each value's key inlined into the value. 
+ """ + assert isinstance(data, dict), '|data| must be a dict' +- for key, value in data.items(): ++ for key, value in list(data.items()): + assert isinstance(value, dict), '|value| must contain dict items' + value[key_name] = key + yield value +@@ -111,7 +111,7 @@ def Flatten(data, all_columns): + + for item in data: + added_item = {} +- for prop, value in item.items(): ++ for prop, value in list(item.items()): + # Non-container properties can be added directly. + if not isinstance(value, (list, set)): + added_item[prop] = value +@@ -149,7 +149,7 @@ def Flatten(data, all_columns): + (int, bool, str)), ('unexpected type for item: %s' % + type(added_item[prop]).__name__) + +- all_columns.update(added_item.keys()) ++ all_columns.update(list(added_item.keys())) + yield added_item + + +diff --git a/src/3rdparty/chromium/docs/enterprise/extension_query_py2.py b/src/3rdparty/chromium/docs/enterprise/extension_query_py2.py +index 597a5647a..e9f4f8b2c 100755 +--- a/src/3rdparty/chromium/docs/enterprise/extension_query_py2.py ++++ b/src/3rdparty/chromium/docs/enterprise/extension_query_py2.py +@@ -2,9 +2,9 @@ + # Copyright 2020 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. +-"""Transform CBCM Takeout API Data (Python2).""" ++"""Transform CBCM Takeout API Data (Python3).""" ++ + +-from __future__ import print_function + + import argparse + import csv +@@ -74,8 +74,8 @@ def ToUtf8(data): + A list of dict objects whose values have been encoded as UTF-8. + """ + for entry in data: +- for prop, value in entry.iteritems(): +- entry[prop] = unicode(value).encode('utf-8') ++ for prop, value in entry.items(): ++ entry[prop] = str(value).encode('utf-8') + yield entry + + +@@ -93,7 +93,7 @@ def DictToList(data, key_name='id'): + The values from |data|, with each value's key inlined into the value. 
+ """ + assert isinstance(data, dict), '|data| must be a dict' +- for key, value in data.items(): ++ for key, value in list(data.items()): + assert isinstance(value, dict), '|value| must contain dict items' + value[key_name] = key + yield value +@@ -123,7 +123,7 @@ def Flatten(data, all_columns): + + for item in data: + added_item = {} +- for prop, value in item.items(): ++ for prop, value in list(item.items()): + # Non-container properties can be added directly. + if not isinstance(value, (list, set)): + added_item[prop] = value +@@ -159,10 +159,10 @@ def Flatten(data, all_columns): + + assert isinstance( + added_item[prop], +- (int, bool, str, unicode)), ('unexpected type for item: %s' % ++ (int, bool, str)), ('unexpected type for item: %s' % + type(added_item[prop]).__name__) + +- all_columns.update(added_item.keys()) ++ all_columns.update(list(added_item.keys())) + yield added_item + + +diff --git a/src/3rdparty/chromium/extensions/common/api/externs_checker.py b/src/3rdparty/chromium/extensions/common/api/externs_checker.py +index 09a97bb10..55ff78fa1 100644 +--- a/src/3rdparty/chromium/extensions/common/api/externs_checker.py ++++ b/src/3rdparty/chromium/extensions/common/api/externs_checker.py +@@ -15,7 +15,7 @@ class ExternsChecker(object): + self._output_api = output_api + self._api_pairs = api_pairs + +- for path in api_pairs.keys() + api_pairs.values(): ++ for path in list(api_pairs.keys()) + list(api_pairs.values()): + if not input_api.os_path.exists(path): + raise OSError('Path Not Found: %s' % path) + +diff --git a/src/3rdparty/chromium/extensions/common/api/externs_checker_test.py b/src/3rdparty/chromium/extensions/common/api/externs_checker_test.py +index a28deafd6..ec98bb4ef 100755 +--- a/src/3rdparty/chromium/extensions/common/api/externs_checker_test.py ++++ b/src/3rdparty/chromium/extensions/common/api/externs_checker_test.py +@@ -30,10 +30,10 @@ class ExternsCheckerTest(unittest.TestCase): + + def testModifiedSourceWithoutModifiedExtern(self): 
+ results = self._runChecks(['b', 'test', 'random']) +- self.assertEquals(1, len(results)) +- self.assertEquals(1, len(results[0].items)) +- self.assertEquals('b', results[0].items[0]) +- self.assertEquals( ++ self.assertEqual(1, len(results)) ++ self.assertEqual(1, len(results[0].items)) ++ self.assertEqual('b', results[0].items[0]) ++ self.assertEqual( + 'To update the externs, run:\n' + ' src/ $ python tools/json_schema_compiler/compiler.py b --root=. ' + '--generator=externs > 2', +@@ -41,15 +41,15 @@ class ExternsCheckerTest(unittest.TestCase): + + def testModifiedSourceWithModifiedExtern(self): + results = self._runChecks(['b', '2', 'test', 'random']) +- self.assertEquals(0, len(results)) ++ self.assertEqual(0, len(results)) + + def testModifiedMultipleSourcesWithNoModifiedExterns(self): + results = self._runChecks(['b', 'test', 'c', 'random']) +- self.assertEquals(1, len(results)) +- self.assertEquals(2, len(results[0].items)) ++ self.assertEqual(1, len(results)) ++ self.assertEqual(2, len(results[0].items)) + self.assertTrue('b' in results[0].items) + self.assertTrue('c' in results[0].items) +- self.assertEquals( ++ self.assertEqual( + 'To update the externs, run:\n' + ' src/ $ python tools/json_schema_compiler/compiler.py ' + '--root=. 
--generator=externs > ', +@@ -57,9 +57,9 @@ class ExternsCheckerTest(unittest.TestCase): + + def testModifiedMultipleSourcesWithOneModifiedExtern(self): + results = self._runChecks(['b', 'test', 'c', 'random', '2']) +- self.assertEquals(1, len(results)) +- self.assertEquals(1, len(results[0].items)) +- self.assertEquals('c', results[0].items[0]) ++ self.assertEqual(1, len(results)) ++ self.assertEqual(1, len(results[0].items)) ++ self.assertEqual('c', results[0].items[0]) + + def testApiFileDoesNotExist(self): + exists = lambda f: f in ['a', 'b', 'c', '1', '2'] +diff --git a/src/3rdparty/chromium/google_apis/build/check_internal.py b/src/3rdparty/chromium/google_apis/build/check_internal.py +index 89514c539..2a0e3b63e 100755 +--- a/src/3rdparty/chromium/google_apis/build/check_internal.py ++++ b/src/3rdparty/chromium/google_apis/build/check_internal.py +@@ -8,7 +8,7 @@ + Takes one argument, a path. Prints 1 if the path exists, 0 if not. + """ + +-from __future__ import print_function ++ + + import os + import sys +diff --git a/src/3rdparty/chromium/google_apis/google_api_keys.py b/src/3rdparty/chromium/google_apis/google_api_keys.py +index 8e528f2ad..ec583fdc9 100755 +--- a/src/3rdparty/chromium/google_apis/google_api_keys.py ++++ b/src/3rdparty/chromium/google_apis/google_api_keys.py +@@ -86,15 +86,15 @@ def GetClientSecret(client_name): + + + if __name__ == "__main__": +- print 'GOOGLE_API_KEY=%s' % GetAPIKey() +- print 'GOOGLE_CLIENT_ID_MAIN=%s' % GetClientID('MAIN') +- print 'GOOGLE_CLIENT_SECRET_MAIN=%s' % GetClientSecret('MAIN') +- print 'GOOGLE_CLIENT_ID_CLOUD_PRINT=%s' % GetClientID('CLOUD_PRINT') +- print 'GOOGLE_CLIENT_SECRET_CLOUD_PRINT=%s' % GetClientSecret('CLOUD_PRINT') +- print 'GOOGLE_CLIENT_ID_REMOTING=%s' % GetClientID('REMOTING') +- print 'GOOGLE_CLIENT_SECRET_REMOTING=%s' % GetClientSecret('REMOTING') +- print 'GOOGLE_CLIENT_ID_REMOTING_HOST=%s' % GetClientID('REMOTING_HOST') +- print 'GOOGLE_CLIENT_SECRET_REMOTING_HOST=%s' % GetClientSecret( +- 
'REMOTING_HOST') +- print 'GOOGLE_CLIENT_ID_REMOTING_IDENTITY_API=%s' %GetClientID( +- 'REMOTING_IDENTITY_API') ++ print('GOOGLE_API_KEY=%s' % GetAPIKey()) ++ print('GOOGLE_CLIENT_ID_MAIN=%s' % GetClientID('MAIN')) ++ print('GOOGLE_CLIENT_SECRET_MAIN=%s' % GetClientSecret('MAIN')) ++ print('GOOGLE_CLIENT_ID_CLOUD_PRINT=%s' % GetClientID('CLOUD_PRINT')) ++ print('GOOGLE_CLIENT_SECRET_CLOUD_PRINT=%s' % GetClientSecret('CLOUD_PRINT')) ++ print('GOOGLE_CLIENT_ID_REMOTING=%s' % GetClientID('REMOTING')) ++ print('GOOGLE_CLIENT_SECRET_REMOTING=%s' % GetClientSecret('REMOTING')) ++ print('GOOGLE_CLIENT_ID_REMOTING_HOST=%s' % GetClientID('REMOTING_HOST')) ++ print('GOOGLE_CLIENT_SECRET_REMOTING_HOST=%s' % GetClientSecret( ++ 'REMOTING_HOST')) ++ print('GOOGLE_CLIENT_ID_REMOTING_IDENTITY_API=%s' %GetClientID( ++ 'REMOTING_IDENTITY_API')) +diff --git a/src/3rdparty/chromium/gpu/command_buffer/build_cmd_buffer_lib.py b/src/3rdparty/chromium/gpu/command_buffer/build_cmd_buffer_lib.py +index 2538596fb..38e9bebea 100644 +--- a/src/3rdparty/chromium/gpu/command_buffer/build_cmd_buffer_lib.py ++++ b/src/3rdparty/chromium/gpu/command_buffer/build_cmd_buffer_lib.py +@@ -682,7 +682,7 @@ def _Namespace(): + def Grouper(n, iterable, fillvalue=None): + """Collect data into fixed-length chunks or blocks""" + args = [iter(iterable)] * n +- return itertools.izip_longest(fillvalue=fillvalue, *args) ++ return itertools.zip_longest(fillvalue=fillvalue, *args) + + + def SplitWords(input_string): +@@ -5923,7 +5923,7 @@ class Function(object): + """Writes the cmd cmd_flags constant.""" + # By default trace only at the highest level 3. 
+ trace_level = int(self.GetInfo('trace_level', default = 3)) +- if trace_level not in xrange(0, 4): ++ if trace_level not in range(0, 4): + raise KeyError("Unhandled trace_level: %d" % trace_level) + + cmd_flags = ('CMD_FLAG_SET_TRACE_LEVEL(%d)' % trace_level) +@@ -6351,11 +6351,11 @@ class GLGenerator(object): + def Log(self, msg): + """Prints something if verbose is true.""" + if self.verbose: +- print msg ++ print(msg) + + def Error(self, msg): + """Prints an error.""" +- print "Error: %s" % msg ++ print("Error: %s" % msg) + self.errors += 1 + + def ParseGLH(self, filename): +@@ -6381,7 +6381,7 @@ class GLGenerator(object): + 'return_type': match.group(1).strip(), + } + +- for k in parsed_func_info.keys(): ++ for k in list(parsed_func_info.keys()): + if not k in func_info: + func_info[k] = parsed_func_info[k] + +@@ -7424,13 +7424,13 @@ const size_t %(p)sUtil::enum_to_string_table_len_ = + f.write("#include \"ppapi/c/ppb_opengles2.h\"\n\n") + else: + f.write("\n#ifndef __gl2_h_\n") +- for (k, v) in _GL_TYPES.iteritems(): ++ for (k, v) in _GL_TYPES.items(): + f.write("typedef %s %s;\n" % (v, k)) + f.write("#ifdef _WIN64\n") +- for (k, v) in _GL_TYPES_64.iteritems(): ++ for (k, v) in _GL_TYPES_64.items(): + f.write("typedef %s %s;\n" % (v, k)) + f.write("#else\n") +- for (k, v) in _GL_TYPES_32.iteritems(): ++ for (k, v) in _GL_TYPES_32.items(): + f.write("typedef %s %s;\n" % (v, k)) + f.write("#endif // _WIN64\n") + f.write("#endif // __gl2_h_\n\n") +@@ -7578,7 +7578,6 @@ def Format(generated_files, output_dir, chromium_root_dir): + if platform.system() == "Windows": + formatter = "third_party\\depot_tools\\clang-format.bat" + formatter = os.path.join(chromium_root_dir, formatter) +- generated_files = map(lambda filename: os.path.join(output_dir, filename), +- generated_files) ++ generated_files = [os.path.join(output_dir, filename) for filename in generated_files] + for filename in generated_files: + call([formatter, "-i", "-style=chromium", filename], 
cwd=chromium_root_dir) +diff --git a/src/3rdparty/chromium/gpu/command_buffer/build_gles2_cmd_buffer.py b/src/3rdparty/chromium/gpu/command_buffer/build_gles2_cmd_buffer.py +index 8a418d991..0cbbfab17 100755 +--- a/src/3rdparty/chromium/gpu/command_buffer/build_gles2_cmd_buffer.py ++++ b/src/3rdparty/chromium/gpu/command_buffer/build_gles2_cmd_buffer.py +@@ -4396,7 +4396,7 @@ def main(argv): + chromium_root_dir) + + if gen.errors > 0: +- print "build_gles2_cmd_buffer.py: Failed with %d errors" % gen.errors ++ print("build_gles2_cmd_buffer.py: Failed with %d errors" % gen.errors) + return 1 + + check_failed_filenames = [] +@@ -4407,10 +4407,10 @@ def main(argv): + check_failed_filenames.append(filename) + + if len(check_failed_filenames) > 0: +- print 'Please run gpu/command_buffer/build_gles2_cmd_buffer.py' +- print 'Failed check on autogenerated command buffer files:' ++ print('Please run gpu/command_buffer/build_gles2_cmd_buffer.py') ++ print('Failed check on autogenerated command buffer files:') + for filename in check_failed_filenames: +- print filename ++ print(filename) + return 1 + + return 0 +diff --git a/src/3rdparty/chromium/gpu/command_buffer/build_raster_cmd_buffer.py b/src/3rdparty/chromium/gpu/command_buffer/build_raster_cmd_buffer.py +index 25fe5cd66..45f2d0ae5 100755 +--- a/src/3rdparty/chromium/gpu/command_buffer/build_raster_cmd_buffer.py ++++ b/src/3rdparty/chromium/gpu/command_buffer/build_raster_cmd_buffer.py +@@ -442,7 +442,7 @@ def main(argv): + chromium_root_dir) + + if gen.errors > 0: +- print "build_raster_cmd_buffer.py: Failed with %d errors" % gen.errors ++ print("build_raster_cmd_buffer.py: Failed with %d errors" % gen.errors) + return 1 + + check_failed_filenames = [] +@@ -453,10 +453,10 @@ def main(argv): + check_failed_filenames.append(filename) + + if len(check_failed_filenames) > 0: +- print 'Please run gpu/command_buffer/build_raster_cmd_buffer.py' +- print 'Failed check on autogenerated command buffer files:' ++ print('Please run 
gpu/command_buffer/build_raster_cmd_buffer.py') ++ print('Failed check on autogenerated command buffer files:') + for filename in check_failed_filenames: +- print filename ++ print(filename) + return 1 + + return 0 +diff --git a/src/3rdparty/chromium/gpu/command_buffer/build_webgpu_cmd_buffer.py b/src/3rdparty/chromium/gpu/command_buffer/build_webgpu_cmd_buffer.py +index 2a1067649..beade1626 100755 +--- a/src/3rdparty/chromium/gpu/command_buffer/build_webgpu_cmd_buffer.py ++++ b/src/3rdparty/chromium/gpu/command_buffer/build_webgpu_cmd_buffer.py +@@ -155,7 +155,7 @@ def main(argv): + chromium_root_dir) + + if gen.errors > 0: +- print "build_webgpu_cmd_buffer.py: Failed with %d errors" % gen.errors ++ print("build_webgpu_cmd_buffer.py: Failed with %d errors" % gen.errors) + return 1 + + check_failed_filenames = [] +@@ -166,10 +166,10 @@ def main(argv): + check_failed_filenames.append(filename) + + if len(check_failed_filenames) > 0: +- print 'Please run gpu/command_buffer/build_webgpu_cmd_buffer.py' +- print 'Failed check on autogenerated command buffer files:' ++ print('Please run gpu/command_buffer/build_webgpu_cmd_buffer.py') ++ print('Failed check on autogenerated command buffer files:') + for filename in check_failed_filenames: +- print filename ++ print(filename) + return 1 + + return 0 +diff --git a/src/3rdparty/chromium/gpu/config/process_json.py b/src/3rdparty/chromium/gpu/config/process_json.py +index 6a609fe93..cbf1af39b 100755 +--- a/src/3rdparty/chromium/gpu/config/process_json.py ++++ b/src/3rdparty/chromium/gpu/config/process_json.py +@@ -606,7 +606,7 @@ def write_conditions(entry_id, is_exception, exception_id, entry, + 'Intel' in driver_vendor) + assert is_intel, 'Intel driver schema is only for Intel GPUs' + valid_version = check_intel_driver_version(driver_version['value']) +- if driver_version.has_key('value2'): ++ if 'value2' in driver_version: + valid_version = (valid_version and + check_intel_driver_version(driver_version['value2'])) + assert 
valid_version, INTEL_DRIVER_VERSION_SCHEMA +@@ -616,7 +616,7 @@ def write_conditions(entry_id, is_exception, exception_id, entry, + is_nvidia = (format(vendor_id, '#04x') == '0x10de') + assert is_nvidia, 'Nvidia driver schema is only for Nvidia GPUs' + valid_version = check_nvidia_driver_version(driver_version['value']) +- if driver_version.has_key('value2'): ++ if 'value2' in driver_version: + valid_version = (valid_version and + check_nvidia_driver_version(driver_version['value2'])) + assert valid_version, NVIDIA_DRIVER_VERSION_SCHEMA +diff --git a/src/3rdparty/chromium/gpu/gles2_conform_support/generate_gles2_embedded_data.py b/src/3rdparty/chromium/gpu/gles2_conform_support/generate_gles2_embedded_data.py +index 809b24a57..fbb8db60c 100755 +--- a/src/3rdparty/chromium/gpu/gles2_conform_support/generate_gles2_embedded_data.py ++++ b/src/3rdparty/chromium/gpu/gles2_conform_support/generate_gles2_embedded_data.py +@@ -70,7 +70,7 @@ class GenerateEmbeddedFiles(object): + sub_dirs.append(full_path) + elif ext in GenerateEmbeddedFiles.extensions_to_include: + if self.base_dir == None: +- print full_path.replace("\\", "/") ++ print(full_path.replace("\\", "/")) + else: + self.count += 1 + name = "_FILE_%s_%d" % (ext.upper(), self.count) +diff --git a/src/3rdparty/chromium/gpu/ipc/common/generate_vulkan_types.py b/src/3rdparty/chromium/gpu/ipc/common/generate_vulkan_types.py +index dd9efa18a..8239d4c24 100755 +--- a/src/3rdparty/chromium/gpu/ipc/common/generate_vulkan_types.py ++++ b/src/3rdparty/chromium/gpu/ipc/common/generate_vulkan_types.py +@@ -598,10 +598,10 @@ def main(argv): + check_failed_filenames.append(filename) + + if len(check_failed_filenames) > 0: +- print 'Please run gpu/ipc/common/generate_vulkan_types.py' +- print 'Failed check on generated files:' ++ print('Please run gpu/ipc/common/generate_vulkan_types.py') ++ print('Failed check on generated files:') + for filename in check_failed_filenames: +- print filename ++ print(filename) + return 1 + + 
return 0 +diff --git a/src/3rdparty/chromium/gpu/khronos_glcts_support/generate_khronos_glcts_tests.py b/src/3rdparty/chromium/gpu/khronos_glcts_support/generate_khronos_glcts_tests.py +index b55379421..15b2f71af 100755 +--- a/src/3rdparty/chromium/gpu/khronos_glcts_support/generate_khronos_glcts_tests.py ++++ b/src/3rdparty/chromium/gpu/khronos_glcts_support/generate_khronos_glcts_tests.py +@@ -59,7 +59,7 @@ def ReadRunFile(run_file): + elif ext == ".run": + tests += ReadRunFile(os.path.join(base_dir, line)) + else: +- raise ValueError, "Unexpected line '%s' in '%s'" % (line, run_file) ++ raise ValueError("Unexpected line '%s' in '%s'" % (line, run_file)) + return tests + + def GenerateTests(run_files, output): +diff --git a/src/3rdparty/chromium/headless/lib/browser/devtools_api/client_api_generator.py b/src/3rdparty/chromium/headless/lib/browser/devtools_api/client_api_generator.py +index b6713168a..7d4c1da96 100644 +--- a/src/3rdparty/chromium/headless/lib/browser/devtools_api/client_api_generator.py ++++ b/src/3rdparty/chromium/headless/lib/browser/devtools_api/client_api_generator.py +@@ -410,7 +410,7 @@ def InitializeDomainDependencies(json_api): + + if not isinstance(json, dict): + return +- for value in json.values(): ++ for value in list(json.values()): + GetDomainDepsFromRefs(domain_name, value) + + if '$ref' in json: +diff --git a/src/3rdparty/chromium/infra/config/lint-luci-milo.py b/src/3rdparty/chromium/infra/config/lint-luci-milo.py +index a91c6f3e6..4bb51b935 100755 +--- a/src/3rdparty/chromium/infra/config/lint-luci-milo.py ++++ b/src/3rdparty/chromium/infra/config/lint-luci-milo.py +@@ -43,12 +43,12 @@ def compare_builders(name, main_builders, sub_builders): + sub_desc = to_list(sub_builders, name) + + if main_desc != sub_desc: +- print ('bot lists different between main waterfall ' + ++ print('bot lists different between main waterfall ' + + 'and stand-alone %s waterfall:' % name) +- print '\n'.join(difflib.unified_diff(main_desc, sub_desc, ++ 
print('\n'.join(difflib.unified_diff(main_desc, sub_desc, + fromfile='main', tofile=name, +- lineterm='')) +- print ++ lineterm=''))) ++ print() + return False + return True + +@@ -80,7 +80,7 @@ def main(): + referenced_names = set(subwaterfalls.keys()) + missing_names = referenced_names - set(all_console_names + excluded_names) + if missing_names: +- print 'Missing subwaterfall console for', missing_names ++ print('Missing subwaterfall console for', missing_names) + return 1 + + # Check that the bots on a subwaterfall match the corresponding bots on the +diff --git a/src/3rdparty/chromium/infra/scripts/sizes.py b/src/3rdparty/chromium/infra/scripts/sizes.py +index 01180fa9d..45766e8e3 100755 +--- a/src/3rdparty/chromium/infra/scripts/sizes.py ++++ b/src/3rdparty/chromium/infra/scripts/sizes.py +@@ -23,12 +23,12 @@ import subprocess + import sys + import tempfile + +-import build_directory ++from . import build_directory + + + SRC_DIR = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..', '..')) +-print SRC_DIR ++print(SRC_DIR) + + # Add Catapult to the path so we can import the chartjson-histogramset + # conversion. +@@ -48,7 +48,7 @@ class ResultsCollector(object): + } + + # Legacy printing, previously used for parsing the text logs. 
+- print 'RESULT %s: %s= %s %s' % (name, identifier, value, units) ++ print('RESULT %s: %s= %s %s' % (name, identifier, value, units)) + + + def get_size(filename): +@@ -78,7 +78,7 @@ def run_process(result, command): + p = subprocess.Popen(command, stdout=subprocess.PIPE) + stdout = p.communicate()[0] + if p.returncode != 0: +- print 'ERROR from command "%s": %d' % (' '.join(command), p.returncode) ++ print('ERROR from command "%s": %d' % (' '.join(command), p.returncode)) + if result == 0: + result = p.returncode + return result, stdout +@@ -289,7 +289,7 @@ def main_linux(options, args, results_collector): + path = os.path.join(target_dir, filename) + try: + size = get_size(path) +- except OSError, e: ++ except OSError as e: + if e.errno == errno.ENOENT: + continue # Don't print anything for missing files. + raise +@@ -299,7 +299,7 @@ def main_linux(options, args, results_collector): + # TODO(mcgrathr): This should all be refactored so the mac and win flavors + # also deliver data structures rather than printing, and the logic for + # the printing and the summing totals is shared across all three flavors. +- for (identifier, units), value in sorted(totals.iteritems()): ++ for (identifier, units), value in sorted(totals.items()): + results_collector.add_result( + 'totals-%s' % identifier, identifier, value, units) + +@@ -431,7 +431,7 @@ def format_for_histograms_conversion(data): + # 1. Add a top-level "benchmark_name" key. + # 2. Pull out the "identifier" value to be the story name. 
+ formatted_data = {} +- for metric, metric_data in data.iteritems(): ++ for metric, metric_data in data.items(): + story = metric_data['identifier'] + formatted_data[metric] = { + story: metric_data.copy() +diff --git a/src/3rdparty/chromium/media/PRESUBMIT.py b/src/3rdparty/chromium/media/PRESUBMIT.py +index 9f8837f49..2f9233fa1 100644 +--- a/src/3rdparty/chromium/media/PRESUBMIT.py ++++ b/src/3rdparty/chromium/media/PRESUBMIT.py +@@ -124,7 +124,7 @@ def _CheckForHistogramOffByOne(input_api, output_api): + + if (not uma_max_re.match(max_arg) and match.group(2) != + 'PRESUBMIT_IGNORE_UMA_MAX'): +- uma_range = range(match.start(), match.end() + 1) ++ uma_range = list(range(match.start(), match.end() + 1)) + # Check if any part of the match is in the changed lines: + for num, line in f.ChangedContents(): + if line_number <= num <= line_number + match.group().count('\n'): +diff --git a/src/3rdparty/chromium/media/tools/constrained_network_server/cn.py b/src/3rdparty/chromium/media/tools/constrained_network_server/cn.py +index fe5781cfa..a43ed42c3 100755 +--- a/src/3rdparty/chromium/media/tools/constrained_network_server/cn.py ++++ b/src/3rdparty/chromium/media/tools/constrained_network_server/cn.py +@@ -59,7 +59,7 @@ def _ParseArgs(): + (indent_first, '', opt_width, s, COMMANDS[s].desc)) + + parser.usage = ('usage: %%prog {%s} [options]\n\n%s' % +- ('|'.join(COMMANDS.keys()), '\n'.join(cmd_usage))) ++ ('|'.join(list(COMMANDS.keys())), '\n'.join(cmd_usage))) + + parser.add_option('--port', type='int', + help='The port to apply traffic control constraints to.') +@@ -83,7 +83,7 @@ def _ParseArgs(): + + # Check a valid command was entered + if not args or args[0].lower() not in COMMANDS: +- parser.error('Please specify a command {%s}.' % '|'.join(COMMANDS.keys())) ++ parser.error('Please specify a command {%s}.' 
% '|'.join(list(COMMANDS.keys()))) + user_cmd = args[0].lower() + + # Check if required options are available +diff --git a/src/3rdparty/chromium/media/tools/constrained_network_server/cns.py b/src/3rdparty/chromium/media/tools/constrained_network_server/cns.py +index d039d8e7a..f08d1d698 100755 +--- a/src/3rdparty/chromium/media/tools/constrained_network_server/cns.py ++++ b/src/3rdparty/chromium/media/tools/constrained_network_server/cns.py +@@ -56,8 +56,8 @@ import signal + import sys + import threading + import time +-import urllib +-import urllib2 ++import urllib.request, urllib.parse, urllib.error ++import urllib.request, urllib.error, urllib.parse + + import traffic_control + +@@ -126,7 +126,7 @@ class PortAllocator(object): + # so just iterate over ports dict for simplicity. + full_key = (key,) + tuple(kwargs.values()) + if not new_port: +- for port, status in self._ports.iteritems(): ++ for port, status in self._ports.items(): + if full_key == status['key']: + self._ports[port]['last_update'] = time.time() + return port +@@ -138,7 +138,7 @@ class PortAllocator(object): + + # Performance isn't really an issue here, so just iterate over the port + # range to find an unused port. If no port is found, None is returned. +- for port in xrange(self._port_range[0], self._port_range[1]): ++ for port in range(self._port_range[0], self._port_range[1]): + if port in self._ports: + continue + if self._SetupPort(port, **kwargs): +@@ -180,7 +180,7 @@ class PortAllocator(object): + with self._port_lock: + now = time.time() + # Use .items() instead of .iteritems() so we can delete keys w/o error. 
+- for port, status in self._ports.items(): ++ for port, status in list(self._ports.items()): + expired = now - status['last_update'] > self._expiry_time_secs + matching_ip = request_ip and status['key'][0].startswith(request_ip) + if all_ports or expired or matching_ip: +@@ -295,7 +295,7 @@ class ConstrainedNetworkServer(object): + test_url = self._GetServerURL(f, self._options.local_server_port) + try: + cherrypy.log('Check file exist using URL: %s' % test_url) +- return urllib2.urlopen(test_url) is not None ++ return urllib.request.urlopen(test_url) is not None + except Exception: + raise cherrypy.HTTPError(404, 'File not found on local server.') + +@@ -323,7 +323,7 @@ class ConstrainedNetworkServer(object): + cherrypy.url().replace('ServeConstrained', self._options.www_root), f) + + url = url.replace(':%d' % self._options.port, ':%d' % port) +- extra_args = urllib.urlencode(kwargs) ++ extra_args = urllib.parse.urlencode(kwargs) + if extra_args: + url += extra_args + return url +diff --git a/src/3rdparty/chromium/media/tools/constrained_network_server/cns_test.py b/src/3rdparty/chromium/media/tools/constrained_network_server/cns_test.py +index a3ac54e36..506eb27d7 100755 +--- a/src/3rdparty/chromium/media/tools/constrained_network_server/cns_test.py ++++ b/src/3rdparty/chromium/media/tools/constrained_network_server/cns_test.py +@@ -45,7 +45,7 @@ import subprocess + import tempfile + import time + import unittest +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + import cherrypy + import cns + import traffic_control +@@ -73,7 +73,7 @@ class PortAllocatorTest(unittest.TestCase): + def tearDown(self): + self._pa.Cleanup(all_ports=True) + # Ensure ports are cleaned properly. 
+- self.assertEquals(self._pa._ports, {}) ++ self.assertEqual(self._pa._ports, {}) + time.time = self._old_time + self._RestoreTrafficControl() + +@@ -93,45 +93,45 @@ class PortAllocatorTest(unittest.TestCase): + + def testPortAllocator(self): + # Ensure Get() succeeds and returns the correct port. +- self.assertEquals(self._pa.Get('test'), cns._DEFAULT_CNS_PORT_RANGE[0]) ++ self.assertEqual(self._pa.Get('test'), cns._DEFAULT_CNS_PORT_RANGE[0]) + + # Call again with the same key and make sure we get the same port. +- self.assertEquals(self._pa.Get('test'), cns._DEFAULT_CNS_PORT_RANGE[0]) ++ self.assertEqual(self._pa.Get('test'), cns._DEFAULT_CNS_PORT_RANGE[0]) + + # Call with a different key and make sure we get a different port. +- self.assertEquals(self._pa.Get('test2'), cns._DEFAULT_CNS_PORT_RANGE[0] + 1) ++ self.assertEqual(self._pa.Get('test2'), cns._DEFAULT_CNS_PORT_RANGE[0] + 1) + + # Update fake time so that ports should expire. + self._current_time += self._EXPIRY_TIME + 1 + + # Test to make sure cache is checked before expiring ports. +- self.assertEquals(self._pa.Get('test2'), cns._DEFAULT_CNS_PORT_RANGE[0] + 1) ++ self.assertEqual(self._pa.Get('test2'), cns._DEFAULT_CNS_PORT_RANGE[0] + 1) + + # Update fake time so that ports should expire. + self._current_time += self._EXPIRY_TIME + 1 + + # Request a new port, old ports should be expired, so we should get the + # first port in the range. Make sure this is the only allocated port. +- self.assertEquals(self._pa.Get('test3'), cns._DEFAULT_CNS_PORT_RANGE[0]) +- self.assertEquals(self._pa._ports.keys(), [cns._DEFAULT_CNS_PORT_RANGE[0]]) ++ self.assertEqual(self._pa.Get('test3'), cns._DEFAULT_CNS_PORT_RANGE[0]) ++ self.assertEqual(list(self._pa._ports.keys()), [cns._DEFAULT_CNS_PORT_RANGE[0]]) + + def testPortAllocatorExpiresOnlyCorrectPorts(self): + # Ensure Get() succeeds and returns the correct port. 
+- self.assertEquals(self._pa.Get('test'), cns._DEFAULT_CNS_PORT_RANGE[0]) ++ self.assertEqual(self._pa.Get('test'), cns._DEFAULT_CNS_PORT_RANGE[0]) + + # Stagger port allocation and so we can ensure only ports older than the + # expiry time are actually expired. + self._current_time += self._EXPIRY_TIME / 2 + 1 + + # Call with a different key and make sure we get a different port. +- self.assertEquals(self._pa.Get('test2'), cns._DEFAULT_CNS_PORT_RANGE[0] + 1) ++ self.assertEqual(self._pa.Get('test2'), cns._DEFAULT_CNS_PORT_RANGE[0] + 1) + + # After this sleep the port with key 'test' should expire on the next Get(). + self._current_time += self._EXPIRY_TIME / 2 + 1 + + # Call with a different key and make sure we get the first port. +- self.assertEquals(self._pa.Get('test3'), cns._DEFAULT_CNS_PORT_RANGE[0]) +- self.assertEquals(set(self._pa._ports.keys()), set([ ++ self.assertEqual(self._pa.Get('test3'), cns._DEFAULT_CNS_PORT_RANGE[0]) ++ self.assertEqual(set(self._pa._ports.keys()), set([ + cns._DEFAULT_CNS_PORT_RANGE[0], cns._DEFAULT_CNS_PORT_RANGE[0] + 1])) + + def testPortAllocatorNoExpiration(self): +@@ -139,15 +139,15 @@ class PortAllocatorTest(unittest.TestCase): + self._pa = cns.PortAllocator(cns._DEFAULT_CNS_PORT_RANGE, 0) + + # Ensure Get() succeeds and returns the correct port. +- self.assertEquals(self._pa.Get('test'), cns._DEFAULT_CNS_PORT_RANGE[0]) ++ self.assertEqual(self._pa.Get('test'), cns._DEFAULT_CNS_PORT_RANGE[0]) + + # Update fake time to see if ports expire. + self._current_time += self._EXPIRY_TIME + + # Send second Get() which would normally cause ports to expire. Ensure that + # the ports did not expire. 
+- self.assertEquals(self._pa.Get('test2'), cns._DEFAULT_CNS_PORT_RANGE[0] + 1) +- self.assertEquals(set(self._pa._ports.keys()), set([ ++ self.assertEqual(self._pa.Get('test2'), cns._DEFAULT_CNS_PORT_RANGE[0] + 1) ++ self.assertEqual(set(self._pa._ports.keys()), set([ + cns._DEFAULT_CNS_PORT_RANGE[0], cns._DEFAULT_CNS_PORT_RANGE[0] + 1])) + + def testPortAllocatorCleanMatchingIP(self): +@@ -155,31 +155,31 @@ class PortAllocatorTest(unittest.TestCase): + self._pa = cns.PortAllocator(cns._DEFAULT_CNS_PORT_RANGE, 0) + + # Ensure Get() succeeds and returns the correct port. +- self.assertEquals(self._pa.Get('ip1', t=1), cns._DEFAULT_CNS_PORT_RANGE[0]) +- self.assertEquals(self._pa.Get('ip1', t=2), ++ self.assertEqual(self._pa.Get('ip1', t=1), cns._DEFAULT_CNS_PORT_RANGE[0]) ++ self.assertEqual(self._pa.Get('ip1', t=2), + cns._DEFAULT_CNS_PORT_RANGE[0] + 1) +- self.assertEquals(self._pa.Get('ip1', t=3), ++ self.assertEqual(self._pa.Get('ip1', t=3), + cns._DEFAULT_CNS_PORT_RANGE[0] + 2) +- self.assertEquals(self._pa.Get('ip2', t=1), ++ self.assertEqual(self._pa.Get('ip2', t=1), + cns._DEFAULT_CNS_PORT_RANGE[0] + 3) + + self._pa.Cleanup(all_ports=False, request_ip='ip1') + +- self.assertEquals(self._pa._ports.keys(), ++ self.assertEqual(list(self._pa._ports.keys()), + [cns._DEFAULT_CNS_PORT_RANGE[0] + 3]) +- self.assertEquals(self._pa.Get('ip2'), cns._DEFAULT_CNS_PORT_RANGE[0]) +- self.assertEquals(self._pa.Get('ip1'), cns._DEFAULT_CNS_PORT_RANGE[0] + 1) ++ self.assertEqual(self._pa.Get('ip2'), cns._DEFAULT_CNS_PORT_RANGE[0]) ++ self.assertEqual(self._pa.Get('ip1'), cns._DEFAULT_CNS_PORT_RANGE[0] + 1) + + self._pa.Cleanup(all_ports=False, request_ip='ip2') +- self.assertEquals(self._pa._ports.keys(), ++ self.assertEqual(list(self._pa._ports.keys()), + [cns._DEFAULT_CNS_PORT_RANGE[0] + 1]) + + self._pa.Cleanup(all_ports=False, request_ip='abc') +- self.assertEquals(self._pa._ports.keys(), ++ self.assertEqual(list(self._pa._ports.keys()), + [cns._DEFAULT_CNS_PORT_RANGE[0] 
+ 1]) + + self._pa.Cleanup(all_ports=False, request_ip='ip1') +- self.assertEquals(self._pa._ports.keys(), []) ++ self.assertEqual(list(self._pa._ports.keys()), []) + + + class ConstrainedNetworkServerTest(unittest.TestCase): +@@ -236,7 +236,7 @@ class ConstrainedNetworkServerTest(unittest.TestCase): + def testServerServesFiles(self): + now = time.time() + +- f = urllib2.urlopen('%sf=%s' % (self._SERVER_URL, self._relative_fn)) ++ f = urllib.request.urlopen('%sf=%s' % (self._SERVER_URL, self._relative_fn)) + + # Verify file data is served correctly. + self.assertEqual(self._TEST_DATA, f.read()) +@@ -253,7 +253,7 @@ class ConstrainedNetworkServerTest(unittest.TestCase): + + base_url = '%sf=%s' % (self._SERVER_URL, self._relative_fn) + url = '%s&latency=%d' % (base_url, self._LATENCY_TEST_SECS * 1000) +- f = urllib2.urlopen(url) ++ f = urllib.request.urlopen(url) + + # Verify file data is served correctly. + self.assertEqual(self._TEST_DATA, f.read()) +diff --git a/src/3rdparty/chromium/media/tools/constrained_network_server/traffic_control.py b/src/3rdparty/chromium/media/tools/constrained_network_server/traffic_control.py +index e94cc8dc9..7a4972296 100644 +--- a/src/3rdparty/chromium/media/tools/constrained_network_server/traffic_control.py ++++ b/src/3rdparty/chromium/media/tools/constrained_network_server/traffic_control.py +@@ -153,7 +153,7 @@ def _CheckArgsExist(config, *args): + TrafficControlError: If any key name does not exist in config or is None. + """ + for key in args: +- if key not in config.keys() or config[key] is None: ++ if key not in list(config.keys()) or config[key] is None: + raise TrafficControlError('Missing "%s" parameter.' % key) + + +@@ -194,7 +194,7 @@ def _ConfigureClass(option, config): + # Use constrained port as class ID so we can attach the qdisc and filter to + # it, as well as delete the class, using only the port number. 
+ class_id = '1:%x' % config['port'] +- if 'bandwidth' not in config.keys() or not config['bandwidth']: ++ if 'bandwidth' not in list(config.keys()) or not config['bandwidth']: + bandwidth = _DEFAULT_MAX_BANDWIDTH_KBIT + else: + bandwidth = config['bandwidth'] +@@ -223,11 +223,11 @@ def _AddSubQdisc(config): + class_id, 'handle', port_hex + ':0', 'netem'] + + # Check if packet-loss is set in the configuration. +- if 'loss' in config.keys() and config['loss']: ++ if 'loss' in list(config.keys()) and config['loss']: + loss = '%d%%' % config['loss'] + command.extend(['loss', loss]) + # Check if latency is set in the configuration. +- if 'latency' in config.keys() and config['latency']: ++ if 'latency' in list(config.keys()) and config['latency']: + latency = '%dms' % config['latency'] + command.extend(['delay', latency]) + +diff --git a/src/3rdparty/chromium/media/tools/constrained_network_server/traffic_control_unittest.py b/src/3rdparty/chromium/media/tools/constrained_network_server/traffic_control_unittest.py +index a6781e9de..4e6941511 100755 +--- a/src/3rdparty/chromium/media/tools/constrained_network_server/traffic_control_unittest.py ++++ b/src/3rdparty/chromium/media/tools/constrained_network_server/traffic_control_unittest.py +@@ -119,7 +119,7 @@ class TrafficControlUnitTests(unittest.TestCase): + # Check seach for handle ID command. + self.assertRaises(traffic_control.TrafficControlError, + traffic_control._GetFilterHandleId, 'fakeeth', 1) +- self.assertEquals(self.commands, ['sudo tc filter list dev fakeeth parent ' ++ self.assertEqual(self.commands, ['sudo tc filter list dev fakeeth parent ' + '1:']) + + # Check with handle ID available. 
+diff --git a/src/3rdparty/chromium/mojo/public/tools/bindings/BUILD.gn b/src/3rdparty/chromium/mojo/public/tools/bindings/BUILD.gn +index fc04b5dd0..708958e43 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/bindings/BUILD.gn ++++ b/src/3rdparty/chromium/mojo/public/tools/bindings/BUILD.gn +@@ -2,9 +2,11 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + ++import("//build/config/python.gni") + import("//mojo/public/tools/bindings/mojom.gni") + import("//third_party/jinja2/jinja2.gni") + ++# TODO(crbug.com/1194274): Investigate nondeterminism in Py3 builds. + action("precompile_templates") { + sources = mojom_generator_sources + sources += [ +diff --git a/src/3rdparty/chromium/mojo/public/tools/bindings/concatenate-files.py b/src/3rdparty/chromium/mojo/public/tools/bindings/concatenate-files.py +index 48bc66fd0..f783f336d 100755 +--- a/src/3rdparty/chromium/mojo/public/tools/bindings/concatenate-files.py ++++ b/src/3rdparty/chromium/mojo/public/tools/bindings/concatenate-files.py +@@ -12,7 +12,7 @@ + # us with an easy and uniform way of doing this on all platforms. + + # for py2/py3 compatibility +-from __future__ import print_function ++ + + import optparse + +diff --git a/src/3rdparty/chromium/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py b/src/3rdparty/chromium/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py +index be8985ced..747047db9 100755 +--- a/src/3rdparty/chromium/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py ++++ b/src/3rdparty/chromium/mojo/public/tools/bindings/concatenate_and_replace_closure_exports.py +@@ -16,7 +16,7 @@ which is instead replaced with an inlined assignment to initialize the + namespace. 
+ """ + +-from __future__ import print_function ++ + + import optparse + import re +diff --git a/src/3rdparty/chromium/mojo/public/tools/bindings/gen_data_files_list.py b/src/3rdparty/chromium/mojo/public/tools/bindings/gen_data_files_list.py +index 79c9e50ef..84c452925 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/bindings/gen_data_files_list.py ++++ b/src/3rdparty/chromium/mojo/public/tools/bindings/gen_data_files_list.py +@@ -12,13 +12,13 @@ pattern that is passed in. Only files which match the regex + will be written to the list. + """ + +-from __future__ import print_function ++ + + import os + import re + import sys + +-from cStringIO import StringIO ++from io import StringIO + from optparse import OptionParser + + sys.path.insert( +@@ -41,12 +41,8 @@ def main(): + pattern = re.compile(options.pattern) + files = [f for f in os.listdir(options.directory) if pattern.match(f)] + +- stream = StringIO() +- for f in files: +- print(f, file=stream) +- +- WriteFile(stream.getvalue(), options.output) +- stream.close() ++ contents = '\n'.join(f for f in files) + '\n' ++ WriteFile(contents, options.output) + + if __name__ == '__main__': + sys.exit(main()) +diff --git a/src/3rdparty/chromium/mojo/public/tools/bindings/generators/cpp_tracing_support.py b/src/3rdparty/chromium/mojo/public/tools/bindings/generators/cpp_tracing_support.py +index 0eaf6cdfe..c72792519 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/bindings/generators/cpp_tracing_support.py ++++ b/src/3rdparty/chromium/mojo/public/tools/bindings/generators/cpp_tracing_support.py +@@ -9,8 +9,7 @@ from abc import ABCMeta + from abc import abstractmethod + + +-class _OutputContext(object): +- __metaclass__ = ABCMeta ++class _OutputContext(object, metaclass=ABCMeta): + """Represents the context in which |self.value| should be used. 
+ + This is a base class for _ArrayItem, _DictionaryItemWithLiteralKey, and +diff --git a/src/3rdparty/chromium/mojo/public/tools/bindings/generators/mojom_cpp_generator.py b/src/3rdparty/chromium/mojo/public/tools/bindings/generators/mojom_cpp_generator.py +index d73b3f483..95feb718f 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/bindings/generators/mojom_cpp_generator.py ++++ b/src/3rdparty/chromium/mojo/public/tools/bindings/generators/mojom_cpp_generator.py +@@ -278,7 +278,7 @@ class Generator(generator.Generator): + for typename in + self.module.structs + all_enums + self.module.unions) + headers = set() +- for typename, typemap in self.typemap.items(): ++ for typename, typemap in list(self.typemap.items()): + if typename in types: + headers.update(typemap.get("public_headers", [])) + return sorted(headers) +@@ -754,10 +754,10 @@ class Generator(generator.Generator): + # Blink and non-Blink bindings. + return any( + mojom.IsMapKind(k) and k.key_kind == kind +- for k in self.module.kinds.values()) ++ for k in list(self.module.kinds.values())) + return False + +- for spec, kind in imported_module.kinds.items(): ++ for spec, kind in list(imported_module.kinds.items()): + if spec in self.module.imported_kinds and requires_full_header(kind): + return True + return False +diff --git a/src/3rdparty/chromium/mojo/public/tools/bindings/generators/mojom_java_generator.py b/src/3rdparty/chromium/mojo/public/tools/bindings/generators/mojom_java_generator.py +index 96b2fdfae..bd75c80eb 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/bindings/generators/mojom_java_generator.py ++++ b/src/3rdparty/chromium/mojo/public/tools/bindings/generators/mojom_java_generator.py +@@ -142,7 +142,7 @@ def GetInterfaceResponseName(method): + return UpperCamelCase(method.name + '_Response') + + def ParseStringAttribute(attribute): +- assert isinstance(attribute, basestring) ++ assert isinstance(attribute, str) + return attribute + + def GetJavaTrueFalse(value): +@@ -331,7 
+331,7 @@ def ExpressionToText(context, token, kind_spec=''): + return _TranslateNamedValue(token) + if kind_spec.startswith('i') or kind_spec.startswith('u'): + number = ast.literal_eval(token.lstrip('+ ')) +- if not isinstance(number, (int, long)): ++ if not isinstance(number, int): + raise ValueError('got unexpected type %r for int literal %r' % ( + type(number), token)) + # If the literal is too large to fit a signed long, convert it to the +@@ -416,8 +416,7 @@ def TempDir(): + def EnumCoversContinuousRange(kind): + if not kind.fields: + return False +- number_of_unique_keys = len(set(map( +- lambda field: field.numeric_value, kind.fields))) ++ number_of_unique_keys = len(set([field.numeric_value for field in kind.fields])) + if kind.max_value - kind.min_value + 1 != number_of_unique_keys: + return False + return True +diff --git a/src/3rdparty/chromium/mojo/public/tools/bindings/generators/mojom_mojolpm_generator.py b/src/3rdparty/chromium/mojo/public/tools/bindings/generators/mojom_mojolpm_generator.py +index e4e8e63bf..586b36a53 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/bindings/generators/mojom_mojolpm_generator.py ++++ b/src/3rdparty/chromium/mojo/public/tools/bindings/generators/mojom_mojolpm_generator.py +@@ -43,7 +43,7 @@ _kind_to_cpp_proto_type = { + + def _IsStrOrUnicode(x): + if sys.version_info[0] < 3: +- return isinstance(x, (unicode, str)) ++ return isinstance(x, str) + return isinstance(x, str) + + +@@ -163,7 +163,7 @@ class Generator(CppGenerator): + AddKind(parameter.kind) + + import_files = list( +- map(lambda x: '{}.mojolpm.proto'.format(x.path), seen_imports)) ++ ['{}.mojolpm.proto'.format(x.path) for x in seen_imports]) + if self.needs_mojolpm_proto: + import_files.append('mojo/public/tools/fuzzers/mojolpm.proto') + import_files.sort() +diff --git a/src/3rdparty/chromium/mojo/public/tools/bindings/mojom_bindings_generator.py b/src/3rdparty/chromium/mojo/public/tools/bindings/mojom_bindings_generator.py +index 
e8588c675..6da272bdc 100755 +--- a/src/3rdparty/chromium/mojo/public/tools/bindings/mojom_bindings_generator.py ++++ b/src/3rdparty/chromium/mojo/public/tools/bindings/mojom_bindings_generator.py +@@ -5,7 +5,7 @@ + + """The frontend for the Mojo bindings system.""" + +-from __future__ import print_function ++ + + import argparse + +@@ -161,7 +161,7 @@ class MojomProcessor(object): + for filename in typemaps: + with open(filename) as f: + typemaps = json.loads("".join(filter(no_comments, f.readlines()))) +- for language, typemap in typemaps.items(): ++ for language, typemap in list(typemaps.items()): + language_map = self._typemap.get(language, {}) + language_map.update(typemap) + self._typemap[language] = language_map +@@ -189,7 +189,7 @@ class MojomProcessor(object): + ScrambleMethodOrdinals(module.interfaces, salt) + + if self._should_generate(rel_filename.path): +- for language, generator_module in generator_modules.items(): ++ for language, generator_module in list(generator_modules.items()): + generator = generator_module.Generator( + module, args.output_dir, typemap=self._typemap.get(language, {}), + variant=args.variant, bytecode_path=args.bytecode_path, +@@ -252,7 +252,7 @@ def _Generate(args, remaining_args): + + + def _Precompile(args, _): +- generator_modules = LoadGenerators(",".join(_BUILTIN_GENERATORS.keys())) ++ generator_modules = LoadGenerators(",".join(list(_BUILTIN_GENERATORS.keys()))) + + template_expander.PrecompileTemplates(generator_modules, args.output_dir) + return 0 +diff --git a/src/3rdparty/chromium/mojo/public/tools/bindings/mojom_types_downgrader.py b/src/3rdparty/chromium/mojo/public/tools/bindings/mojom_types_downgrader.py +index 15f0e3bac..271d5d9ea 100755 +--- a/src/3rdparty/chromium/mojo/public/tools/bindings/mojom_types_downgrader.py ++++ b/src/3rdparty/chromium/mojo/public/tools/bindings/mojom_types_downgrader.py +@@ -25,13 +25,13 @@ _MOJO_REPLACEMENTS = { + # Pre-compiled regular expression that matches against any of the 
replacements. + _REGEXP_PATTERN = re.compile( + r'|'.join( +- ['{}\s*<\s*(.*?)\s*>'.format(k) for k in _MOJO_REPLACEMENTS.keys()]), ++ ['{}\s*<\s*(.*?)\s*>'.format(k) for k in list(_MOJO_REPLACEMENTS.keys())]), + flags=re.DOTALL) + + + def ReplaceFunction(match_object): + """Returns the right replacement for the string matched against the regexp.""" +- for index, (match, repl) in enumerate(_MOJO_REPLACEMENTS.items(), 1): ++ for index, (match, repl) in enumerate(list(_MOJO_REPLACEMENTS.items()), 1): + if match_object.group(0).startswith(match): + return repl.format(match_object.group(index)) + +diff --git a/src/3rdparty/chromium/mojo/public/tools/bindings/validate_typemap_config.py b/src/3rdparty/chromium/mojo/public/tools/bindings/validate_typemap_config.py +index f1783d59b..823282ba0 100755 +--- a/src/3rdparty/chromium/mojo/public/tools/bindings/validate_typemap_config.py ++++ b/src/3rdparty/chromium/mojo/public/tools/bindings/validate_typemap_config.py +@@ -21,7 +21,7 @@ def CheckCppTypemapConfigs(target_name, config_filename, out_filename): + ]) + with open(config_filename, 'r') as f: + for config in json.load(f): +- for key in config.keys(): ++ for key in list(config.keys()): + if key not in _SUPPORTED_CONFIG_KEYS: + raise ValueError('Invalid typemap property "%s" when processing %s' % + (key, target_name)) +@@ -32,7 +32,7 @@ def CheckCppTypemapConfigs(target_name, config_filename, out_filename): + % target_name) + + for entry in types: +- for key in entry.keys(): ++ for key in list(entry.keys()): + if key not in _SUPPORTED_TYPE_KEYS: + raise IOError( + 'Invalid type property "%s" in typemap for "%s" on target %s' % +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/check_stable_mojom_compatibility.py b/src/3rdparty/chromium/mojo/public/tools/mojom/check_stable_mojom_compatibility.py +index 08bd672f5..92277b8ae 100755 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/check_stable_mojom_compatibility.py ++++ 
b/src/3rdparty/chromium/mojo/public/tools/mojom/check_stable_mojom_compatibility.py +@@ -89,10 +89,10 @@ def _ValidateDelta(root, delta): + modules[mojom] = translate.OrderedModule(ast, mojom, all_modules) + + old_modules = {} +- for mojom in old_files.keys(): ++ for mojom in list(old_files.keys()): + parseMojom(mojom, old_files, old_modules) + new_modules = {} +- for mojom in new_files.keys(): ++ for mojom in list(new_files.keys()): + parseMojom(mojom, new_files, new_modules) + + # At this point we have a complete set of translated Modules from both the +@@ -104,7 +104,7 @@ def _ValidateDelta(root, delta): + # checked. + def collectTypes(modules): + types = {} +- for m in modules.values(): ++ for m in list(modules.values()): + for kinds in (m.enums, m.structs, m.unions, m.interfaces): + for kind in kinds: + types[kind.qualified_name] = kind +@@ -115,12 +115,12 @@ def _ValidateDelta(root, delta): + + # Collect any renamed types so they can be compared accordingly. + renamed_types = {} +- for name, kind in new_types.items(): ++ for name, kind in list(new_types.items()): + old_name = kind.attributes and kind.attributes.get('RenamedFrom') + if old_name: + renamed_types[old_name] = name + +- for qualified_name, kind in old_types.items(): ++ for qualified_name, kind in list(old_types.items()): + if not kind.stable: + continue + +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/enum_unittest.py b/src/3rdparty/chromium/mojo/public/tools/mojom/enum_unittest.py +index d90050786..74e1bff74 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/enum_unittest.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/enum_unittest.py +@@ -69,11 +69,11 @@ class EnumTest(MojomParserTestCase): + def testInvalidConstantReference(self): + """Verifies that enum values cannot be assigned from the value of + non-integral constants.""" +- with self.assertRaisesRegexp(ValueError, 'not an integer'): ++ with self.assertRaisesRegex(ValueError, 'not an integer'): + 
self.ExtractTypes('const float kFoo = 1.0; enum E { kA = kFoo };') +- with self.assertRaisesRegexp(ValueError, 'not an integer'): ++ with self.assertRaisesRegex(ValueError, 'not an integer'): + self.ExtractTypes('const double kFoo = 1.0; enum E { kA = kFoo };') +- with self.assertRaisesRegexp(ValueError, 'not an integer'): ++ with self.assertRaisesRegex(ValueError, 'not an integer'): + self.ExtractTypes('const string kFoo = "lol"; enum E { kA = kFoo };') + + def testImportedConstantReference(self): +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/constant_resolver.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/constant_resolver.py +index 0dfd996e3..4fdd89a16 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/constant_resolver.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/constant_resolver.py +@@ -3,7 +3,7 @@ + # found in the LICENSE file. + """Resolves the values used for constants and enums.""" + +-from itertools import ifilter ++ + + from mojom.generate import module as mojom + +@@ -16,7 +16,7 @@ def ResolveConstants(module, expression_to_text): + assert isinstance(named_value, (mojom.EnumValue, mojom.ConstantValue)) + if isinstance(named_value, mojom.EnumValue): + field = next( +- ifilter(lambda field: field.name == named_value.name, ++ filter(lambda field: field.name == named_value.name, + named_value.enum.fields), None) + if not field: + raise RuntimeError( +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/generator.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/generator.py +index de62260a5..35b70c834 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/generator.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/generator.py +@@ -3,7 +3,7 @@ + # found in the LICENSE file. 
+ """Code shared by the various language-specific code generators.""" + +-from __future__ import print_function ++ + + from functools import partial + import os.path +@@ -76,7 +76,7 @@ def _ToSnakeCase(identifier, upper=False): + + + if upper: +- words = map(lambda x: x.upper(), words) ++ words = [x.upper() for x in words] + + return '_'.join(words) + +@@ -136,9 +136,14 @@ class Stylizer(object): + + def WriteFile(contents, full_path): + # If |contents| is same with the file content, we skip updating. ++ if not isinstance(contents, bytes): ++ data = contents.encode('utf8') ++ else: ++ data = contents ++ + if os.path.isfile(full_path): + with open(full_path, 'rb') as destination_file: +- if destination_file.read() == contents: ++ if destination_file.read() == data: + return + + # Make sure the containing directory exists. +@@ -146,11 +151,8 @@ def WriteFile(contents, full_path): + fileutil.EnsureDirectoryExists(full_dir) + + # Dump the data to disk. +- with open(full_path, "wb") as f: +- if not isinstance(contents, bytes): +- f.write(contents.encode('utf-8')) +- else: +- f.write(contents) ++ with open(full_path, 'wb') as f: ++ f.write(data) + + + def AddComputedData(module): +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/generator_unittest.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/generator_unittest.py +index 32c884a8c..b9decf80a 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/generator_unittest.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/generator_unittest.py +@@ -30,43 +30,43 @@ class StringManipulationTest(unittest.TestCase): + """generator contains some string utilities, this tests only those.""" + + def testSplitCamelCase(self): +- self.assertEquals(["camel", "case"], generator.SplitCamelCase("CamelCase")) +- self.assertEquals(["url", "loader", "factory"], ++ self.assertEqual(["camel", "case"], generator.SplitCamelCase("CamelCase")) ++ 
self.assertEqual(["url", "loader", "factory"], + generator.SplitCamelCase('URLLoaderFactory')) +- self.assertEquals(["get99", "entries"], ++ self.assertEqual(["get99", "entries"], + generator.SplitCamelCase('Get99Entries')) +- self.assertEquals(["get99entries"], ++ self.assertEqual(["get99entries"], + generator.SplitCamelCase('Get99entries')) + + def testToCamel(self): +- self.assertEquals("CamelCase", generator.ToCamel("camel_case")) +- self.assertEquals("CAMELCASE", generator.ToCamel("CAMEL_CASE")) +- self.assertEquals("camelCase", ++ self.assertEqual("CamelCase", generator.ToCamel("camel_case")) ++ self.assertEqual("CAMELCASE", generator.ToCamel("CAMEL_CASE")) ++ self.assertEqual("camelCase", + generator.ToCamel("camel_case", lower_initial=True)) +- self.assertEquals("CamelCase", generator.ToCamel( ++ self.assertEqual("CamelCase", generator.ToCamel( + "camel case", delimiter=' ')) +- self.assertEquals("CaMelCaSe", generator.ToCamel("caMel_caSe")) +- self.assertEquals("L2Tp", generator.ToCamel("l2tp", digits_split=True)) +- self.assertEquals("l2tp", generator.ToCamel("l2tp", lower_initial=True)) ++ self.assertEqual("CaMelCaSe", generator.ToCamel("caMel_caSe")) ++ self.assertEqual("L2Tp", generator.ToCamel("l2tp", digits_split=True)) ++ self.assertEqual("l2tp", generator.ToCamel("l2tp", lower_initial=True)) + + def testToSnakeCase(self): +- self.assertEquals("snake_case", generator.ToLowerSnakeCase("SnakeCase")) +- self.assertEquals("snake_case", generator.ToLowerSnakeCase("snakeCase")) +- self.assertEquals("snake_case", generator.ToLowerSnakeCase("SnakeCASE")) +- self.assertEquals("snake_d3d11_case", ++ self.assertEqual("snake_case", generator.ToLowerSnakeCase("SnakeCase")) ++ self.assertEqual("snake_case", generator.ToLowerSnakeCase("snakeCase")) ++ self.assertEqual("snake_case", generator.ToLowerSnakeCase("SnakeCASE")) ++ self.assertEqual("snake_d3d11_case", + generator.ToLowerSnakeCase("SnakeD3D11Case")) +- self.assertEquals("snake_d3d11_case", ++ 
self.assertEqual("snake_d3d11_case", + generator.ToLowerSnakeCase("SnakeD3d11Case")) +- self.assertEquals("snake_d3d11_case", ++ self.assertEqual("snake_d3d11_case", + generator.ToLowerSnakeCase("snakeD3d11Case")) +- self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCase")) +- self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("snakeCase")) +- self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCASE")) +- self.assertEquals("SNAKE_D3D11_CASE", ++ self.assertEqual("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCase")) ++ self.assertEqual("SNAKE_CASE", generator.ToUpperSnakeCase("snakeCase")) ++ self.assertEqual("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCASE")) ++ self.assertEqual("SNAKE_D3D11_CASE", + generator.ToUpperSnakeCase("SnakeD3D11Case")) +- self.assertEquals("SNAKE_D3D11_CASE", ++ self.assertEqual("SNAKE_D3D11_CASE", + generator.ToUpperSnakeCase("SnakeD3d11Case")) +- self.assertEquals("SNAKE_D3D11_CASE", ++ self.assertEqual("SNAKE_D3D11_CASE", + generator.ToUpperSnakeCase("snakeD3d11Case")) + + +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/module.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/module.py +index ebbc9b322..e10022b98 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/module.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/module.py +@@ -73,7 +73,7 @@ def Repr(obj, as_ref=True): + return ('{\n%s\n}' % (',\n'.join( + ' %s: %s' % (Repr(key, as_ref).replace('\n', '\n '), + Repr(val, as_ref).replace('\n', '\n ')) +- for key, val in obj.items()))) ++ for key, val in list(obj.items())))) + else: + return repr(obj) + +@@ -96,7 +96,7 @@ def GenericRepr(obj, names): + '\n', '\n ')) + + return '%s(\n%s\n)' % (obj.__class__.__name__, ',\n'.join( +- ReprIndent(name, as_ref) for (name, as_ref) in names.items())) ++ ReprIndent(name, as_ref) for (name, as_ref) in list(names.items()))) + + + class Kind(object): +@@ -398,7 +398,8 
@@ class Field(object): + + + class StructField(Field): +- pass ++ def __hash__(self): ++ return super(Field, self).__hash__() + + + class UnionField(Field): +@@ -645,7 +646,7 @@ class Union(ReferenceKind): + return False + + max_old_min_version = 0 +- for ordinal, old_field in old_fields.items(): ++ for ordinal, old_field in list(old_fields.items()): + new_field = new_fields.get(ordinal) + if not new_field: + # A field was removed, which is not OK. +@@ -1129,7 +1130,7 @@ class Interface(ReferenceKind): + new_methods = buildOrdinalMethodMap(self) + old_methods = buildOrdinalMethodMap(older_interface) + max_old_min_version = 0 +- for ordinal, old_method in old_methods.items(): ++ for ordinal, old_method in list(old_methods.items()): + new_method = new_methods.get(ordinal) + if not new_method: + # A method was removed, which is not OK. +@@ -1311,10 +1312,10 @@ class Enum(Kind): + old_fields = buildVersionFieldMap(older_enum) + new_fields = buildVersionFieldMap(self) + +- if new_fields.keys() != old_fields.keys() and not older_enum.extensible: ++ if list(new_fields.keys()) != list(old_fields.keys()) and not older_enum.extensible: + return False + +- for min_version, valid_values in old_fields.items(): ++ for min_version, valid_values in list(old_fields.items()): + if (min_version not in new_fields + or new_fields[min_version] != valid_values): + return False +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/module_unittest.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/module_unittest.py +index e8fd4936c..c59b9de2f 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/module_unittest.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/module_unittest.py +@@ -15,7 +15,7 @@ class ModuleTest(unittest.TestCase): + struct = mojom.Struct('TestStruct', module=module) + with self.assertRaises(Exception) as e: + mojom.InterfaceRequest(struct) +- self.assertEquals( ++ self.assertEqual( + 
e.exception.__str__(), + 'Interface request requires \'x:TestStruct\' to be an interface.') + +@@ -26,6 +26,6 @@ class ModuleTest(unittest.TestCase): + struct = mojom.Struct('TestStruct', module=module) + with self.assertRaises(Exception) as e: + mojom.AssociatedInterface(struct) +- self.assertEquals( ++ self.assertEqual( + e.exception.__str__(), + 'Associated interface requires \'x:TestStruct\' to be an interface.') +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/template_expander.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/template_expander.py +index 7a3005602..59dc85b84 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/template_expander.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/template_expander.py +@@ -67,7 +67,7 @@ def UseJinjaForImportedTemplate(func): + + + def PrecompileTemplates(generator_modules, output_dir): +- for module in generator_modules.values(): ++ for module in list(generator_modules.values()): + generator = module.Generator(None) + jinja_env = jinja2.Environment( + loader=jinja2.FileSystemLoader([ +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/translate.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/translate.py +index 68c1843e7..2fe80d221 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/translate.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/translate.py +@@ -21,7 +21,7 @@ from mojom.parse import ast + + def _IsStrOrUnicode(x): + if sys.version_info[0] < 3: +- return isinstance(x, (unicode, str)) ++ return isinstance(x, str) + return isinstance(x, str) + + +@@ -195,7 +195,7 @@ def _LookupValueInScope(module, kind, identifier): + """Given a kind and an identifier, this attempts to resolve the given + identifier to a concrete NamedValue within the scope of the given kind.""" + scope = _GetScopeForKind(module, kind) +- for i in 
reversed(range(len(scope) + 1)): ++ for i in reversed(list(range(len(scope) + 1))): + qualified_name = '.'.join(scope[:i] + (identifier, )) + value = module.values.get(qualified_name) + if value: +@@ -304,12 +304,12 @@ def _Kind(kinds, spec, scope): + def _Import(module, import_module): + # Copy the struct kinds from our imports into the current module. + importable_kinds = (mojom.Struct, mojom.Union, mojom.Enum, mojom.Interface) +- for kind in import_module.kinds.values(): ++ for kind in list(import_module.kinds.values()): + if (isinstance(kind, importable_kinds) + and kind.module.path == import_module.path): + module.kinds[kind.spec] = kind + # Ditto for values. +- for value in import_module.values.values(): ++ for value in list(import_module.values.values()): + if value.module.path == import_module.path: + module.values[value.GetSpec()] = value + +@@ -458,12 +458,10 @@ def _Method(module, parsed_method, interface): + parsed_method.mojom_name, + ordinal=parsed_method.ordinal.value if parsed_method.ordinal else None) + method.parameters = list( +- map(lambda parameter: _Parameter(module, parameter, interface), +- parsed_method.parameter_list)) ++ [_Parameter(module, parameter, interface) for parameter in parsed_method.parameter_list]) + if parsed_method.response_parameter_list is not None: + method.response_parameters = list( +- map(lambda parameter: _Parameter(module, parameter, interface), +- parsed_method.response_parameter_list)) ++ [_Parameter(module, parameter, interface) for parameter in parsed_method.response_parameter_list]) + method.attributes = _AttributeListToDict(parsed_method.attribute_list) + + # Enforce that only methods with response can have a [Sync] attribute. 
+@@ -589,8 +587,7 @@ def _Enum(module, parsed_enum, parent_kind): + + if not enum.native_only: + enum.fields = list( +- map(lambda field: _EnumField(module, enum, field), +- parsed_enum.enum_value_list)) ++ [_EnumField(module, enum, field) for field in parsed_enum.enum_value_list]) + _ResolveNumericEnumValues(enum) + + module.kinds[enum.spec] = enum +@@ -785,8 +782,7 @@ def _Module(tree, path, imports): + all_defined_kinds = {} + for struct in module.structs: + struct.fields = list( +- map(lambda field: _StructField(module, field, struct), +- struct.fields_data)) ++ [_StructField(module, field, struct) for field in struct.fields_data]) + _AssignDefaultOrdinals(struct.fields) + del struct.fields_data + all_defined_kinds[struct.spec] = struct +@@ -795,15 +791,14 @@ def _Module(tree, path, imports): + + for union in module.unions: + union.fields = list( +- map(lambda field: _UnionField(module, field, union), union.fields_data)) ++ [_UnionField(module, field, union) for field in union.fields_data]) + _AssignDefaultOrdinals(union.fields) + del union.fields_data + all_defined_kinds[union.spec] = union + + for interface in module.interfaces: + interface.methods = list( +- map(lambda method: _Method(module, method, interface), +- interface.methods_data)) ++ [_Method(module, method, interface) for method in interface.methods_data]) + _AssignDefaultOrdinals(interface.methods) + del interface.methods_data + all_defined_kinds[interface.spec] = interface +@@ -813,7 +808,7 @@ def _Module(tree, path, imports): + all_defined_kinds[enum.spec] = enum + + all_referenced_kinds = _CollectReferencedKinds(module, +- all_defined_kinds.values()) ++ list(all_defined_kinds.values())) + imported_kind_specs = set(all_referenced_kinds.keys()).difference( + set(all_defined_kinds.keys())) + module.imported_kinds = dict( +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/translate_unittest.py 
b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/translate_unittest.py +index 19905c8a9..dc9c11dae 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/translate_unittest.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/generate/translate_unittest.py +@@ -18,18 +18,18 @@ class TranslateTest(unittest.TestCase): + def testSimpleArray(self): + """Tests a simple int32[].""" + # pylint: disable=W0212 +- self.assertEquals(translate._MapKind("int32[]"), "a:i32") ++ self.assertEqual(translate._MapKind("int32[]"), "a:i32") + + def testAssociativeArray(self): + """Tests a simple uint8{string}.""" + # pylint: disable=W0212 +- self.assertEquals(translate._MapKind("uint8{string}"), "m[s][u8]") ++ self.assertEqual(translate._MapKind("uint8{string}"), "m[s][u8]") + + def testLeftToRightAssociativeArray(self): + """Makes sure that parsing is done from right to left on the internal kinds + in the presence of an associative array.""" + # pylint: disable=W0212 +- self.assertEquals(translate._MapKind("uint8[]{string}"), "m[s][a:u8]") ++ self.assertEqual(translate._MapKind("uint8[]{string}"), "m[s][a:u8]") + + def testTranslateSimpleUnions(self): + """Makes sure that a simple union is translated correctly.""" +@@ -67,7 +67,7 @@ class TranslateTest(unittest.TestCase): + def testAssociatedKinds(self): + """Tests type spec translation of associated interfaces and requests.""" + # pylint: disable=W0212 +- self.assertEquals( ++ self.assertEqual( + translate._MapKind("asso?"), "?asso:x:SomeInterface") +- self.assertEquals( ++ self.assertEqual( + translate._MapKind("asso?"), "?asso:r:x:SomeInterface") +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/ast.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/ast.py +index 1f0db2005..e0d6afc81 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/ast.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/ast.py +@@ -14,7 +14,7 @@ 
import sys + + def _IsStrOrUnicode(x): + if sys.version_info[0] < 3: +- return isinstance(x, (unicode, str)) ++ return isinstance(x, str) + return isinstance(x, str) + + +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/ast_unittest.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/ast_unittest.py +index 62798631d..94a089293 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/ast_unittest.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/ast_unittest.py +@@ -35,8 +35,8 @@ class ASTTest(unittest.TestCase): + # comparison by value and ignore filenames/line numbers (for convenience). + node1 = ast.NodeBase(filename="hello.mojom", lineno=123) + node2 = ast.NodeBase() +- self.assertEquals(node1, node2) +- self.assertEquals(node2, node1) ++ self.assertEqual(node1, node2) ++ self.assertEqual(node2, node1) + + # Check that |__ne__()| just defers to |__eq__()| properly. + self.assertFalse(node1 != node2) +@@ -44,8 +44,8 @@ class ASTTest(unittest.TestCase): + + # Check that |filename| and |lineno| are set properly (and are None by + # default). +- self.assertEquals(node1.filename, "hello.mojom") +- self.assertEquals(node1.lineno, 123) ++ self.assertEqual(node1.filename, "hello.mojom") ++ self.assertEqual(node1.lineno, 123) + self.assertIsNone(node2.filename) + self.assertIsNone(node2.lineno) + +@@ -59,9 +59,9 @@ class ASTTest(unittest.TestCase): + self.assertFalse(node3 == node1) + + node4 = _TestNode(123, filename="world.mojom", lineno=123) +- self.assertEquals(node4, node3) ++ self.assertEqual(node4, node3) + node5 = _TestNode(456) +- self.assertNotEquals(node5, node4) ++ self.assertNotEqual(node5, node4) + + def testNodeListBase(self): + node1 = _TestNode(1, filename="foo.mojom", lineno=1) +@@ -70,52 +70,52 @@ class ASTTest(unittest.TestCase): + node2 = _TestNode(2, filename="foo.mojom", lineno=2) + + nodelist1 = _TestNodeList() # Contains: (empty). 
+- self.assertEquals(nodelist1, nodelist1) +- self.assertEquals(nodelist1.items, []) ++ self.assertEqual(nodelist1, nodelist1) ++ self.assertEqual(nodelist1.items, []) + self.assertIsNone(nodelist1.filename) + self.assertIsNone(nodelist1.lineno) + + nodelist2 = _TestNodeList(node1) # Contains: 1. +- self.assertEquals(nodelist2, nodelist2) +- self.assertEquals(nodelist2.items, [node1]) ++ self.assertEqual(nodelist2, nodelist2) ++ self.assertEqual(nodelist2.items, [node1]) + self.assertNotEqual(nodelist2, nodelist1) +- self.assertEquals(nodelist2.filename, "foo.mojom") +- self.assertEquals(nodelist2.lineno, 1) ++ self.assertEqual(nodelist2.filename, "foo.mojom") ++ self.assertEqual(nodelist2.lineno, 1) + + nodelist3 = _TestNodeList([node2]) # Contains: 2. +- self.assertEquals(nodelist3.items, [node2]) ++ self.assertEqual(nodelist3.items, [node2]) + self.assertNotEqual(nodelist3, nodelist1) + self.assertNotEqual(nodelist3, nodelist2) +- self.assertEquals(nodelist3.filename, "foo.mojom") +- self.assertEquals(nodelist3.lineno, 2) ++ self.assertEqual(nodelist3.filename, "foo.mojom") ++ self.assertEqual(nodelist3.lineno, 2) + + nodelist1.Append(node1b) # Contains: 1. +- self.assertEquals(nodelist1.items, [node1]) +- self.assertEquals(nodelist1, nodelist2) ++ self.assertEqual(nodelist1.items, [node1]) ++ self.assertEqual(nodelist1, nodelist2) + self.assertNotEqual(nodelist1, nodelist3) +- self.assertEquals(nodelist1.filename, "foo.mojom") +- self.assertEquals(nodelist1.lineno, 1) ++ self.assertEqual(nodelist1.filename, "foo.mojom") ++ self.assertEqual(nodelist1.lineno, 1) + + nodelist1.Append(node2) # Contains: 1, 2. +- self.assertEquals(nodelist1.items, [node1, node2]) ++ self.assertEqual(nodelist1.items, [node1, node2]) + self.assertNotEqual(nodelist1, nodelist2) + self.assertNotEqual(nodelist1, nodelist3) +- self.assertEquals(nodelist1.lineno, 1) ++ self.assertEqual(nodelist1.lineno, 1) + + nodelist2.Append(node2) # Contains: 1, 2. 
+- self.assertEquals(nodelist2.items, [node1, node2]) +- self.assertEquals(nodelist2, nodelist1) ++ self.assertEqual(nodelist2.items, [node1, node2]) ++ self.assertEqual(nodelist2, nodelist1) + self.assertNotEqual(nodelist2, nodelist3) +- self.assertEquals(nodelist2.lineno, 1) ++ self.assertEqual(nodelist2.lineno, 1) + + nodelist3.Insert(node1) # Contains: 1, 2. +- self.assertEquals(nodelist3.items, [node1, node2]) +- self.assertEquals(nodelist3, nodelist1) +- self.assertEquals(nodelist3, nodelist2) +- self.assertEquals(nodelist3.lineno, 1) ++ self.assertEqual(nodelist3.items, [node1, node2]) ++ self.assertEqual(nodelist3, nodelist1) ++ self.assertEqual(nodelist3, nodelist2) ++ self.assertEqual(nodelist3.lineno, 1) + + # Test iteration: + i = 1 + for item in nodelist1: +- self.assertEquals(item.value, i) ++ self.assertEqual(item.value, i) + i += 1 +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py +index aa609be73..edf7afef8 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/conditional_features_unittest.py +@@ -37,7 +37,7 @@ class ConditionalFeaturesTest(unittest.TestCase): + definition = parser.Parse(source, "my_file.mojom") + conditional_features.RemoveDisabledDefinitions(definition, ENABLED_FEATURES) + expected = parser.Parse(expected_source, "my_file.mojom") +- self.assertEquals(definition, expected) ++ self.assertEqual(definition, expected) + + def testFilterConst(self): + """Test that Consts are correctly filtered.""" +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py +index eadc6587c..26b5b1e04 100644 +--- 
a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py +@@ -63,117 +63,117 @@ class LexerTest(unittest.TestCase): + + def testValidKeywords(self): + """Tests valid keywords.""" +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("handle"), _MakeLexTokenForKeyword("handle")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("import"), _MakeLexTokenForKeyword("import")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("module"), _MakeLexTokenForKeyword("module")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("struct"), _MakeLexTokenForKeyword("struct")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("union"), _MakeLexTokenForKeyword("union")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("interface"), + _MakeLexTokenForKeyword("interface")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("enum"), _MakeLexTokenForKeyword("enum")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("const"), _MakeLexTokenForKeyword("const")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("true"), _MakeLexTokenForKeyword("true")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("false"), _MakeLexTokenForKeyword("false")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("default"), + _MakeLexTokenForKeyword("default")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("array"), _MakeLexTokenForKeyword("array")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("map"), _MakeLexTokenForKeyword("map")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("associated"), + _MakeLexTokenForKeyword("associated")) + + def testValidIdentifiers(self): + """Tests identifiers.""" +- 
self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("abcd"), _MakeLexToken("NAME", "abcd")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("AbC_d012_"), + _MakeLexToken("NAME", "AbC_d012_")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("_0123"), _MakeLexToken("NAME", "_0123")) + + def testInvalidIdentifiers(self): +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + mojom.parse.lexer.LexError, + r"^my_file\.mojom:1: Error: Illegal character '\$'$"): + self._TokensForInput("$abc") +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + mojom.parse.lexer.LexError, + r"^my_file\.mojom:1: Error: Illegal character '\$'$"): + self._TokensForInput("a$bc") + + def testDecimalIntegerConstants(self): +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("0"), _MakeLexToken("INT_CONST_DEC", "0")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("1"), _MakeLexToken("INT_CONST_DEC", "1")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("123"), _MakeLexToken("INT_CONST_DEC", "123")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("10"), _MakeLexToken("INT_CONST_DEC", "10")) + + def testValidTokens(self): + """Tests valid tokens (which aren't tested elsewhere).""" + # Keywords tested in |testValidKeywords|. + # NAME tested in |testValidIdentifiers|. 
+- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("@123"), _MakeLexToken("ORDINAL", "@123")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("456"), _MakeLexToken("INT_CONST_DEC", "456")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("0x01aB2eF3"), + _MakeLexToken("INT_CONST_HEX", "0x01aB2eF3")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("123.456"), + _MakeLexToken("FLOAT_CONST", "123.456")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("\"hello\""), + _MakeLexToken("STRING_LITERAL", "\"hello\"")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("+"), _MakeLexToken("PLUS", "+")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("-"), _MakeLexToken("MINUS", "-")) +- self.assertEquals(self._SingleTokenForInput("&"), _MakeLexToken("AMP", "&")) +- self.assertEquals( ++ self.assertEqual(self._SingleTokenForInput("&"), _MakeLexToken("AMP", "&")) ++ self.assertEqual( + self._SingleTokenForInput("?"), _MakeLexToken("QSTN", "?")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("="), _MakeLexToken("EQUALS", "=")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("=>"), _MakeLexToken("RESPONSE", "=>")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("("), _MakeLexToken("LPAREN", "(")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput(")"), _MakeLexToken("RPAREN", ")")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("["), _MakeLexToken("LBRACKET", "[")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("]"), _MakeLexToken("RBRACKET", "]")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("{"), _MakeLexToken("LBRACE", "{")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput("}"), _MakeLexToken("RBRACE", "}")) +- self.assertEquals( ++ 
self.assertEqual( + self._SingleTokenForInput("<"), _MakeLexToken("LANGLE", "<")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput(">"), _MakeLexToken("RANGLE", ">")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput(";"), _MakeLexToken("SEMI", ";")) +- self.assertEquals( ++ self.assertEqual( + self._SingleTokenForInput(","), _MakeLexToken("COMMA", ",")) +- self.assertEquals(self._SingleTokenForInput("."), _MakeLexToken("DOT", ".")) ++ self.assertEqual(self._SingleTokenForInput("."), _MakeLexToken("DOT", ".")) + + def _TokensForInput(self, input_string): + """Gets a list of tokens for the given input string.""" +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/parser_unittest.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/parser_unittest.py +index 6d6b71532..88cca7e1f 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/parser_unittest.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom/parse/parser_unittest.py +@@ -25,7 +25,7 @@ class ParserTest(unittest.TestCase): + """ + expected = ast.Mojom( + ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), []) +- self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testSourceWithCrLfs(self): + """Tests a .mojom source with CR-LFs instead of LFs.""" +@@ -33,7 +33,7 @@ class ParserTest(unittest.TestCase): + source = "// This is a comment.\r\n\r\nmodule my_module;\r\n" + expected = ast.Mojom( + ast.Module(('IDENTIFIER', 'my_module'), None), ast.ImportList(), []) +- self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testUnexpectedEOF(self): + """Tests a "truncated" .mojom source.""" +@@ -43,7 +43,7 @@ class ParserTest(unittest.TestCase): + + module my_module + """ +- with self.assertRaisesRegexp( ++ with 
self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom: Error: Unexpected end of file$"): + parser.Parse(source, "my_file.mojom") + +@@ -57,7 +57,7 @@ class ParserTest(unittest.TestCase): + // Foo. + asdf1 + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:4: Error: Unexpected 'asdf1':\n *asdf1$"): + parser.Parse(source1, "my_file.mojom") +@@ -74,7 +74,7 @@ class ParserTest(unittest.TestCase): + + asdf2 + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:10: Error: Unexpected 'asdf2':\n *asdf2$"): + parser.Parse(source2, "my_file.mojom") +@@ -86,7 +86,7 @@ class ParserTest(unittest.TestCase): + /* Baz. */ + asdf3 + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:5: Error: Unexpected 'asdf3':\n *asdf3$"): + parser.Parse(source3, "my_file.mojom") +@@ -103,7 +103,7 @@ class ParserTest(unittest.TestCase): + Quux. 
*/ + asdf4 + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:10: Error: Unexpected 'asdf4':\n *asdf4$"): + parser.Parse(source4, "my_file.mojom") +@@ -128,7 +128,7 @@ class ParserTest(unittest.TestCase): + ast.StructField('b', None, None, 'double', None) + ])) + ]) +- self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testSimpleStructWithoutModule(self): + """Tests a simple struct without an explict module statement.""" +@@ -147,7 +147,7 @@ class ParserTest(unittest.TestCase): + ast.StructField('b', None, None, 'double', None) + ])) + ]) +- self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testValidStructDefinitions(self): + """Tests all types of definitions that can occur in a struct.""" +@@ -171,7 +171,7 @@ class ParserTest(unittest.TestCase): + ast.StructField('b', None, None, 'SomeOtherStruct', None) + ])) + ]) +- self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testInvalidStructDefinitions(self): + """Tests that definitions that aren't allowed in a struct are correctly +@@ -182,7 +182,7 @@ class ParserTest(unittest.TestCase): + MyMethod(int32 a); + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '\(':\n" + r" *MyMethod\(int32 a\);$"): + parser.Parse(source1, "my_file.mojom") +@@ -194,7 +194,7 @@ class ParserTest(unittest.TestCase): + }; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'struct':\n" + r" *struct MyInnerStruct {$"): + parser.Parse(source2, "my_file.mojom") +@@ -206,7 +206,7 @@ class ParserTest(unittest.TestCase): + }; + }; + """ +- with 
self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:2: Error: Unexpected 'interface':\n" + r" *interface MyInterface {$"): +@@ -222,7 +222,7 @@ class ParserTest(unittest.TestCase): + int32 a; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:2: Error: Unexpected ';':\n *module ;$"): + parser.Parse(source1, "my_file.mojom") +@@ -237,7 +237,7 @@ class ParserTest(unittest.TestCase): + int32 a; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected 'struct':\n" + r" *struct MyStruct {$"): + parser.Parse(source2, "my_file.mojom") +@@ -249,7 +249,7 @@ class ParserTest(unittest.TestCase): + module foo; + module bar; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:2: Error: Multiple \"module\" statements not " + r"allowed:\n *module bar;$"): +@@ -262,7 +262,7 @@ class ParserTest(unittest.TestCase): + import "foo.mojom"; + module foo; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:2: Error: \"module\" statements must precede imports " + r"and definitions:\n *module foo;$"): +@@ -277,7 +277,7 @@ class ParserTest(unittest.TestCase): + }; + module foo; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:4: Error: \"module\" statements must precede imports " + r"and definitions:\n *module foo;$"): +@@ -292,7 +292,7 @@ class ParserTest(unittest.TestCase): + }; + import "foo.mojom"; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:4: Error: \"import\" statements must precede " + r"definitions:\n *import \"foo.mojom\";$"): +@@ -334,21 +334,21 @@ class ParserTest(unittest.TestCase): + ast.EnumValue('VALUE7', None, None) + ])) + ]) +- 
self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testInvalidEnumInitializers(self): + """Tests that invalid enum initializers are correctly detected.""" + + # Floating point value. + source2 = "enum MyEnum { VALUE = 0.123 };" +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected '0\.123':\n" + r"enum MyEnum { VALUE = 0\.123 };$"): + parser.Parse(source2, "my_file.mojom") + + # Boolean value. + source2 = "enum MyEnum { VALUE = true };" +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected 'true':\n" + r"enum MyEnum { VALUE = true };$"): + parser.Parse(source2, "my_file.mojom") +@@ -374,7 +374,7 @@ class ParserTest(unittest.TestCase): + ('IDENTIFIER', 'kNumber')) + ])) + ]) +- self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testNoConditionals(self): + """Tests that ?: is not allowed.""" +@@ -386,7 +386,7 @@ class ParserTest(unittest.TestCase): + MY_ENUM_1 = 1 ? 2 : 3 + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected '\?':\n" + r" *MY_ENUM_1 = 1 \? 
2 : 3$"): + parser.Parse(source, "my_file.mojom") +@@ -429,7 +429,7 @@ class ParserTest(unittest.TestCase): + ast.Ordinal(1234567890), 'int32', None) + ])) + ]) +- self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testInvalidOrdinals(self): + """Tests that (lexically) invalid ordinals are correctly detected.""" +@@ -441,7 +441,7 @@ class ParserTest(unittest.TestCase): + int32 a_missing@; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + lexer.LexError, r"^my_file\.mojom:4: Error: Missing ordinal value$"): + parser.Parse(source1, "my_file.mojom") + +@@ -452,7 +452,7 @@ class ParserTest(unittest.TestCase): + int32 a_octal@01; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + lexer.LexError, r"^my_file\.mojom:4: Error: " + r"Octal and hexadecimal ordinal values not allowed$"): + parser.Parse(source2, "my_file.mojom") +@@ -460,19 +460,19 @@ class ParserTest(unittest.TestCase): + source3 = """\ + module my_module; struct MyStruct { int32 a_invalid_octal@08; }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + lexer.LexError, r"^my_file\.mojom:1: Error: " + r"Octal and hexadecimal ordinal values not allowed$"): + parser.Parse(source3, "my_file.mojom") + + source4 = "module my_module; struct MyStruct { int32 a_hex@0x1aB9; };" +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + lexer.LexError, r"^my_file\.mojom:1: Error: " + r"Octal and hexadecimal ordinal values not allowed$"): + parser.Parse(source4, "my_file.mojom") + + source5 = "module my_module; struct MyStruct { int32 a_hex@0X0; };" +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + lexer.LexError, r"^my_file\.mojom:1: Error: " + r"Octal and hexadecimal ordinal values not allowed$"): + parser.Parse(source5, "my_file.mojom") +@@ -482,7 +482,7 @@ class ParserTest(unittest.TestCase): + int32 a_too_big@999999999999; + 
}; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: " + r"Ordinal value 999999999999 too large:\n" + r" *int32 a_too_big@999999999999;$"): +@@ -504,7 +504,7 @@ class ParserTest(unittest.TestCase): + 'MyStruct', None, + ast.StructBody(ast.StructField('a', None, None, 'int32', None))) + ]) +- self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testValidHandleTypes(self): + """Tests (valid) handle types.""" +@@ -538,7 +538,7 @@ class ParserTest(unittest.TestCase): + ast.StructField('f', None, None, 'handle', None) + ])) + ]) +- self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testInvalidHandleType(self): + """Tests an invalid (unknown) handle type.""" +@@ -548,7 +548,7 @@ class ParserTest(unittest.TestCase): + handle foo; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: " + r"Invalid handle type 'wtf_is_this':\n" + r" *handle foo;$"): +@@ -613,7 +613,7 @@ class ParserTest(unittest.TestCase): + ast.StructField('a22', None, None, 'double', '+.123E10') + ])) + ]) +- self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testValidFixedSizeArray(self): + """Tests parsing a fixed size array.""" +@@ -639,7 +639,7 @@ class ParserTest(unittest.TestCase): + None) + ])) + ]) +- self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testValidNestedArray(self): + """Tests parsing a nested array.""" +@@ -651,7 +651,7 @@ class ParserTest(unittest.TestCase): + ast.StructBody( + ast.StructField('nested_array', None, None, 'int32[][]', None))) + ]) +- 
self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testInvalidFixedArraySize(self): + """Tests that invalid fixed array bounds are correctly detected.""" +@@ -661,7 +661,7 @@ class ParserTest(unittest.TestCase): + array zero_size_array; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:2: Error: Fixed array size 0 invalid:\n" + r" *array zero_size_array;$"): +@@ -672,7 +672,7 @@ class ParserTest(unittest.TestCase): + array too_big_array; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:2: Error: Fixed array size 999999999999 invalid:\n" + r" *array too_big_array;$"): +@@ -683,7 +683,7 @@ class ParserTest(unittest.TestCase): + array not_a_number; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'abcdefg':\n" + r" *array not_a_number;"): + parser.Parse(source3, "my_file.mojom") +@@ -698,7 +698,7 @@ class ParserTest(unittest.TestCase): + ast.StructBody( + [ast.StructField('data', None, None, 'uint8{string}', None)])) + ]) +- self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1) ++ self.assertEqual(parser.Parse(source1, "my_file.mojom"), expected1) + + source2 = "interface MyInterface { MyMethod(map a); };" + expected2 = ast.Mojom(None, ast.ImportList(), [ +@@ -711,7 +711,7 @@ class ParserTest(unittest.TestCase): + ast.Parameter('a', None, None, 'uint8{string}')), + None))) + ]) +- self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2) ++ self.assertEqual(parser.Parse(source2, "my_file.mojom"), expected2) + + source3 = "struct MyStruct { map> data; };" + expected3 = ast.Mojom(None, ast.ImportList(), [ +@@ -720,7 +720,7 @@ class ParserTest(unittest.TestCase): + ast.StructBody( + [ast.StructField('data', None, None, 
'uint8[]{string}', None)])) + ]) +- self.assertEquals(parser.Parse(source3, "my_file.mojom"), expected3) ++ self.assertEqual(parser.Parse(source3, "my_file.mojom"), expected3) + + def testValidMethod(self): + """Tests parsing method declarations.""" +@@ -735,7 +735,7 @@ class ParserTest(unittest.TestCase): + ast.ParameterList(ast.Parameter('a', None, None, 'int32')), + None))) + ]) +- self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1) ++ self.assertEqual(parser.Parse(source1, "my_file.mojom"), expected1) + + source2 = """\ + interface MyInterface { +@@ -757,7 +757,7 @@ class ParserTest(unittest.TestCase): + ast.ParameterList(), ast.ParameterList()) + ])) + ]) +- self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2) ++ self.assertEqual(parser.Parse(source2, "my_file.mojom"), expected2) + + source3 = """\ + interface MyInterface { +@@ -776,7 +776,7 @@ class ParserTest(unittest.TestCase): + ast.Parameter('b', None, None, 'bool') + ])))) + ]) +- self.assertEquals(parser.Parse(source3, "my_file.mojom"), expected3) ++ self.assertEqual(parser.Parse(source3, "my_file.mojom"), expected3) + + def testInvalidMethods(self): + """Tests that invalid method declarations are correctly detected.""" +@@ -787,7 +787,7 @@ class ParserTest(unittest.TestCase): + MyMethod(string a,); + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '\)':\n" + r" *MyMethod\(string a,\);$"): + parser.Parse(source1, "my_file.mojom") +@@ -798,7 +798,7 @@ class ParserTest(unittest.TestCase): + MyMethod(, string a); + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected ',':\n" + r" *MyMethod\(, string a\);$"): + parser.Parse(source2, "my_file.mojom") +@@ -826,7 +826,7 @@ class ParserTest(unittest.TestCase): + ast.ParameterList(ast.Parameter('y', None, None, 'MyEnum'))) + ])) + ]) +- 
self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testInvalidInterfaceDefinitions(self): + """Tests that definitions that aren't allowed in an interface are correctly +@@ -839,7 +839,7 @@ class ParserTest(unittest.TestCase): + }; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'struct':\n" + r" *struct MyStruct {$"): + parser.Parse(source1, "my_file.mojom") +@@ -851,7 +851,7 @@ class ParserTest(unittest.TestCase): + }; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:2: Error: Unexpected 'interface':\n" + r" *interface MyInnerInterface {$"): +@@ -864,7 +864,7 @@ class ParserTest(unittest.TestCase): + """ + # The parser thinks that "int32" is a plausible name for a method, so it's + # "my_field" that gives it away. +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'my_field':\n" + r" *int32 my_field;$"): + parser.Parse(source3, "my_file.mojom") +@@ -879,7 +879,7 @@ class ParserTest(unittest.TestCase): + expected1 = ast.Mojom( + None, ast.ImportList(), + [ast.Struct('MyStruct', ast.AttributeList(), ast.StructBody())]) +- self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1) ++ self.assertEqual(parser.Parse(source1, "my_file.mojom"), expected1) + + # One-element attribute list, with name value. + source2 = "[MyAttribute=MyName] struct MyStruct {};" +@@ -888,7 +888,7 @@ class ParserTest(unittest.TestCase): + ast.AttributeList(ast.Attribute("MyAttribute", "MyName")), + ast.StructBody()) + ]) +- self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2) ++ self.assertEqual(parser.Parse(source2, "my_file.mojom"), expected2) + + # Two-element attribute list, with one string value and one integer value. 
+ source3 = "[MyAttribute1 = \"hello\", MyAttribute2 = 5] struct MyStruct {};" +@@ -900,7 +900,7 @@ class ParserTest(unittest.TestCase): + ast.Attribute("MyAttribute2", 5) + ]), ast.StructBody()) + ]) +- self.assertEquals(parser.Parse(source3, "my_file.mojom"), expected3) ++ self.assertEqual(parser.Parse(source3, "my_file.mojom"), expected3) + + # Various places that attribute list is allowed. + source4 = """\ +@@ -966,7 +966,7 @@ class ParserTest(unittest.TestCase): + ast.Const('kMyConst', ast.AttributeList( + ast.Attribute("Attr12", 12)), 'double', '1.23') + ]) +- self.assertEquals(parser.Parse(source4, "my_file.mojom"), expected4) ++ self.assertEqual(parser.Parse(source4, "my_file.mojom"), expected4) + + # TODO(vtl): Boolean attributes don't work yet. (In fact, we just |eval()| + # literal (non-name) values, which is extremely dubious.) +@@ -977,21 +977,21 @@ class ParserTest(unittest.TestCase): + + # Trailing commas not allowed. + source1 = "[MyAttribute=MyName,] struct MyStruct {};" +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected '\]':\n" + r"\[MyAttribute=MyName,\] struct MyStruct {};$"): + parser.Parse(source1, "my_file.mojom") + + # Missing value. + source2 = "[MyAttribute=] struct MyStruct {};" +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected '\]':\n" + r"\[MyAttribute=\] struct MyStruct {};$"): + parser.Parse(source2, "my_file.mojom") + + # Missing key. 
+ source3 = "[=MyName] struct MyStruct {};" +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:1: Error: Unexpected '=':\n" + r"\[=MyName\] struct MyStruct {};$"): + parser.Parse(source3, "my_file.mojom") +@@ -1004,7 +1004,7 @@ class ParserTest(unittest.TestCase): + expected1 = ast.Mojom(None, + ast.ImportList(ast.Import(None, "somedir/my.mojom")), + []) +- self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1) ++ self.assertEqual(parser.Parse(source1, "my_file.mojom"), expected1) + + # Two imports (no module statement). + source2 = """\ +@@ -1017,7 +1017,7 @@ class ParserTest(unittest.TestCase): + ast.Import(None, "somedir/my1.mojom"), + ast.Import(None, "somedir/my2.mojom") + ]), []) +- self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2) ++ self.assertEqual(parser.Parse(source2, "my_file.mojom"), expected2) + + # Imports with module statement. + source3 = """\ +@@ -1031,7 +1031,7 @@ class ParserTest(unittest.TestCase): + ast.Import(None, "somedir/my1.mojom"), + ast.Import(None, "somedir/my2.mojom") + ]), []) +- self.assertEquals(parser.Parse(source3, "my_file.mojom"), expected3) ++ self.assertEqual(parser.Parse(source3, "my_file.mojom"), expected3) + + def testInvalidImports(self): + """Tests that invalid import statements are correctly detected.""" +@@ -1040,7 +1040,7 @@ class ParserTest(unittest.TestCase): + // Make the error occur on line 2. 
+ import invalid + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'invalid':\n" + r" *import invalid$"): + parser.Parse(source1, "my_file.mojom") +@@ -1051,7 +1051,7 @@ class ParserTest(unittest.TestCase): + int32 a; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'struct':\n" + r" *struct MyStruct {$"): + parser.Parse(source2, "my_file.mojom") +@@ -1062,7 +1062,7 @@ class ParserTest(unittest.TestCase): + int32 a; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'struct':\n" + r" *struct MyStruct {$"): + parser.Parse(source3, "my_file.mojom") +@@ -1114,7 +1114,7 @@ class ParserTest(unittest.TestCase): + ast.StructField('o', None, None, 'handle?', None) + ])) + ]) +- self.assertEquals(parser.Parse(source, "my_file.mojom"), expected) ++ self.assertEqual(parser.Parse(source, "my_file.mojom"), expected) + + def testInvalidNullableTypes(self): + """Tests that invalid nullable types are correctly detected.""" +@@ -1123,7 +1123,7 @@ class ParserTest(unittest.TestCase): + string?? a; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '\?':\n" + r" *string\?\? a;$"): + parser.Parse(source1, "my_file.mojom") +@@ -1133,7 +1133,7 @@ class ParserTest(unittest.TestCase): + handle? a; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '<':\n" + r" *handle\? 
a;$"): + parser.Parse(source2, "my_file.mojom") +@@ -1143,7 +1143,7 @@ class ParserTest(unittest.TestCase): + some_interface?& a; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '&':\n" + r" *some_interface\?& a;$"): + parser.Parse(source3, "my_file.mojom") +@@ -1168,7 +1168,7 @@ class ParserTest(unittest.TestCase): + ])) + ]) + actual = parser.Parse(source, "my_file.mojom") +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + def testUnionWithOrdinals(self): + """Test that ordinals are assigned to fields.""" +@@ -1190,7 +1190,7 @@ class ParserTest(unittest.TestCase): + ])) + ]) + actual = parser.Parse(source, "my_file.mojom") +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + def testUnionWithStructMembers(self): + """Test that struct members are accepted.""" +@@ -1208,7 +1208,7 @@ class ParserTest(unittest.TestCase): + ast.UnionBody([ast.UnionField('s', None, None, 'SomeStruct')])) + ]) + actual = parser.Parse(source, "my_file.mojom") +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + def testUnionWithArrayMember(self): + """Test that array members are accepted.""" +@@ -1226,7 +1226,7 @@ class ParserTest(unittest.TestCase): + ast.UnionBody([ast.UnionField('a', None, None, 'int32[]')])) + ]) + actual = parser.Parse(source, "my_file.mojom") +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + def testUnionWithMapMember(self): + """Test that map members are accepted.""" +@@ -1245,7 +1245,7 @@ class ParserTest(unittest.TestCase): + [ast.UnionField('m', None, None, 'string{int32}')])) + ]) + actual = parser.Parse(source, "my_file.mojom") +- self.assertEquals(actual, expected) ++ self.assertEqual(actual, expected) + + def testUnionDisallowNestedStruct(self): + """Tests that structs cannot be nested in unions.""" +@@ -1258,7 +1258,7 @@ class 
ParserTest(unittest.TestCase): + }; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected 'struct':\n" + r" *struct MyStruct {$"): + parser.Parse(source, "my_file.mojom") +@@ -1274,7 +1274,7 @@ class ParserTest(unittest.TestCase): + }; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:4: Error: Unexpected 'interface':\n" + r" *interface MyInterface {$"): +@@ -1291,7 +1291,7 @@ class ParserTest(unittest.TestCase): + }; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected 'union':\n" + r" *union MyOtherUnion {$"): + parser.Parse(source, "my_file.mojom") +@@ -1307,7 +1307,7 @@ class ParserTest(unittest.TestCase): + }; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:4: Error: Unexpected 'enum':\n" + r" *enum MyEnum {$"): + parser.Parse(source, "my_file.mojom") +@@ -1332,7 +1332,7 @@ class ParserTest(unittest.TestCase): + ast.StructField('d', None, None, 'asso?', None) + ])) + ]) +- self.assertEquals(parser.Parse(source1, "my_file.mojom"), expected1) ++ self.assertEqual(parser.Parse(source1, "my_file.mojom"), expected1) + + source2 = """\ + interface MyInterface { +@@ -1349,7 +1349,7 @@ class ParserTest(unittest.TestCase): + ast.ParameterList( + ast.Parameter('b', None, None, 'asso'))))) + ]) +- self.assertEquals(parser.Parse(source2, "my_file.mojom"), expected2) ++ self.assertEqual(parser.Parse(source2, "my_file.mojom"), expected2) + + def testInvalidAssociatedKinds(self): + """Tests that invalid associated interfaces and requests are correctly +@@ -1359,7 +1359,7 @@ class ParserTest(unittest.TestCase): + associated associated SomeInterface a; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, + r"^my_file\.mojom:2: Error: 
Unexpected 'associated':\n" + r" *associated associated SomeInterface a;$"): +@@ -1370,7 +1370,7 @@ class ParserTest(unittest.TestCase): + associated handle a; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected 'handle':\n" + r" *associated handle a;$"): + parser.Parse(source2, "my_file.mojom") +@@ -1380,7 +1380,7 @@ class ParserTest(unittest.TestCase): + associated? MyInterface& a; + }; + """ +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + parser.ParseError, r"^my_file\.mojom:2: Error: Unexpected '\?':\n" + r" *associated\? MyInterface& a;$"): + parser.Parse(source3, "my_file.mojom") +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom_parser.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom_parser.py +index 12adbfb9d..0260cab30 100755 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom_parser.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom_parser.py +@@ -148,7 +148,7 @@ def _CollectAllowedImportsFromBuildMetadata(build_metadata_filename): + with open(metadata_filename) as f: + metadata = json.load(f) + allowed_imports.update( +- map(os.path.normcase, map(os.path.normpath, metadata['sources']))) ++ list(map(os.path.normcase, list(map(os.path.normpath, metadata['sources']))))) + for dep_metadata in metadata['deps']: + if dep_metadata not in processed_deps: + collect(dep_metadata) +@@ -192,8 +192,8 @@ def _ParseMojoms(mojom_files, + _RebaseAbsolutePath(abs_path, input_root_paths)) + for abs_path in mojom_files) + abs_paths = dict( +- (path, abs_path) for abs_path, path in mojom_files_to_parse.items()) +- for mojom_abspath, _ in mojom_files_to_parse.items(): ++ (path, abs_path) for abs_path, path in list(mojom_files_to_parse.items())) ++ for mojom_abspath, _ in list(mojom_files_to_parse.items()): + with codecs.open(mojom_abspath, encoding='utf-8') as f: + ast = parser.Parse(''.join(f.readlines()), mojom_abspath) + 
conditional_features.RemoveDisabledDefinitions(ast, enabled_features) +@@ -234,7 +234,7 @@ def _ParseMojoms(mojom_files, + # and we have a complete dependency tree of the unprocessed inputs. Now we can + # load all the inputs, resolving dependencies among them recursively as we go. + num_existing_modules_loaded = len(loaded_modules) +- for mojom_abspath, mojom_path in mojom_files_to_parse.items(): ++ for mojom_abspath, mojom_path in list(mojom_files_to_parse.items()): + _EnsureInputLoaded(mojom_abspath, mojom_path, abs_paths, loaded_mojom_asts, + input_dependencies, loaded_modules) + assert (num_existing_modules_loaded + +@@ -242,7 +242,7 @@ def _ParseMojoms(mojom_files, + + # Now we have fully translated modules for every input and every transitive + # dependency. We can dump the modules to disk for other tools to use. +- for mojom_abspath, mojom_path in mojom_files_to_parse.items(): ++ for mojom_abspath, mojom_path in list(mojom_files_to_parse.items()): + module_path = os.path.join(output_root_path, _GetModuleFilename(mojom_path)) + module_dir = os.path.dirname(module_path) + if not os.path.exists(module_dir): +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom_parser_test_case.py b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom_parser_test_case.py +index e213fbfa7..ee7ebac0b 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom_parser_test_case.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom_parser_test_case.py +@@ -56,7 +56,7 @@ class MojomParserTestCase(unittest.TestCase): + args = [ + '--input-root', self._temp_dir, '--input-root', out_dir, + '--output-root', out_dir, '--mojoms' +- ] + list(map(lambda mojom: os.path.join(self._temp_dir, mojom), mojoms)) ++ ] + list([os.path.join(self._temp_dir, mojom) for mojom in mojoms]) + if metadata: + args.extend(['--check-imports', self.GetPath(metadata)]) + mojom_parser.Run(args) +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom_parser_unittest.py 
b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom_parser_unittest.py +index a93f34bac..14f8a1343 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/mojom_parser_unittest.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/mojom_parser_unittest.py +@@ -86,7 +86,7 @@ class MojomParserTest(MojomParserTestCase): + module a; + import "non-existent.mojom"; + struct Bar {};""") +- with self.assertRaisesRegexp(ValueError, "does not exist"): ++ with self.assertRaisesRegex(ValueError, "does not exist"): + self.ParseMojoms([a]) + + def testUnparsedImport(self): +@@ -106,7 +106,7 @@ class MojomParserTest(MojomParserTestCase): + + # a.mojom has not been parsed yet, so its import will fail when processing + # b.mojom here. +- with self.assertRaisesRegexp(ValueError, "does not exist"): ++ with self.assertRaisesRegex(ValueError, "does not exist"): + self.ParseMojoms([b]) + + def testCheckImportsBasic(self): +@@ -167,5 +167,5 @@ class MojomParserTest(MojomParserTestCase): + struct Foo { a.Bar bar; };""") + + self.ParseMojoms([a], metadata=a_metadata) +- with self.assertRaisesRegexp(ValueError, "not allowed by build"): ++ with self.assertRaisesRegex(ValueError, "not allowed by build"): + self.ParseMojoms([b], metadata=b_metadata) +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/stable_attribute_unittest.py b/src/3rdparty/chromium/mojo/public/tools/mojom/stable_attribute_unittest.py +index d45ec5862..c88ded88f 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/stable_attribute_unittest.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/stable_attribute_unittest.py +@@ -52,20 +52,20 @@ class StableAttributeTest(MojomParserTestCase): + self.ExtractTypes( + '[Stable] interface F {}; [Stable] struct T { pending_remote f; };') + +- with self.assertRaisesRegexp(Exception, 'because it depends on E'): ++ with self.assertRaisesRegex(Exception, 'because it depends on E'): + self.ExtractTypes('enum E { A }; [Stable] struct S { E e; };') +- with 
self.assertRaisesRegexp(Exception, 'because it depends on X'): ++ with self.assertRaisesRegex(Exception, 'because it depends on X'): + self.ExtractTypes('struct X {}; [Stable] struct S { X x; };') +- with self.assertRaisesRegexp(Exception, 'because it depends on T'): ++ with self.assertRaisesRegex(Exception, 'because it depends on T'): + self.ExtractTypes('struct T {}; [Stable] struct S { array xs; };') +- with self.assertRaisesRegexp(Exception, 'because it depends on T'): ++ with self.assertRaisesRegex(Exception, 'because it depends on T'): + self.ExtractTypes('struct T {}; [Stable] struct S { map xs; };') +- with self.assertRaisesRegexp(Exception, 'because it depends on T'): ++ with self.assertRaisesRegex(Exception, 'because it depends on T'): + self.ExtractTypes('struct T {}; [Stable] struct S { map xs; };') +- with self.assertRaisesRegexp(Exception, 'because it depends on F'): ++ with self.assertRaisesRegex(Exception, 'because it depends on F'): + self.ExtractTypes( + 'interface F {}; [Stable] struct S { pending_remote f; };') +- with self.assertRaisesRegexp(Exception, 'because it depends on F'): ++ with self.assertRaisesRegex(Exception, 'because it depends on F'): + self.ExtractTypes( + 'interface F {}; [Stable] struct S { pending_receiver f; };') + +@@ -80,20 +80,20 @@ class StableAttributeTest(MojomParserTestCase): + self.ExtractTypes( + '[Stable] interface F {}; [Stable] union U { pending_remote f; };') + +- with self.assertRaisesRegexp(Exception, 'because it depends on E'): ++ with self.assertRaisesRegex(Exception, 'because it depends on E'): + self.ExtractTypes('enum E { A }; [Stable] union U { E e; };') +- with self.assertRaisesRegexp(Exception, 'because it depends on X'): ++ with self.assertRaisesRegex(Exception, 'because it depends on X'): + self.ExtractTypes('struct X {}; [Stable] union U { X x; };') +- with self.assertRaisesRegexp(Exception, 'because it depends on T'): ++ with self.assertRaisesRegex(Exception, 'because it depends on T'): + 
self.ExtractTypes('struct T {}; [Stable] union U { array xs; };') +- with self.assertRaisesRegexp(Exception, 'because it depends on T'): ++ with self.assertRaisesRegex(Exception, 'because it depends on T'): + self.ExtractTypes('struct T {}; [Stable] union U { map xs; };') +- with self.assertRaisesRegexp(Exception, 'because it depends on T'): ++ with self.assertRaisesRegex(Exception, 'because it depends on T'): + self.ExtractTypes('struct T {}; [Stable] union U { map xs; };') +- with self.assertRaisesRegexp(Exception, 'because it depends on F'): ++ with self.assertRaisesRegex(Exception, 'because it depends on F'): + self.ExtractTypes( + 'interface F {}; [Stable] union U { pending_remote f; };') +- with self.assertRaisesRegexp(Exception, 'because it depends on F'): ++ with self.assertRaisesRegex(Exception, 'because it depends on F'): + self.ExtractTypes( + 'interface F {}; [Stable] union U { pending_receiver f; };') + +@@ -109,19 +109,19 @@ class StableAttributeTest(MojomParserTestCase): + [Stable] interface F { A@0(E e, S s) => (bool b, array s); }; + """) + +- with self.assertRaisesRegexp(Exception, 'because it depends on E'): ++ with self.assertRaisesRegex(Exception, 'because it depends on E'): + self.ExtractTypes( + 'enum E { A, B, C }; [Stable] interface F { A@0(E e); };') +- with self.assertRaisesRegexp(Exception, 'because it depends on E'): ++ with self.assertRaisesRegex(Exception, 'because it depends on E'): + self.ExtractTypes( + 'enum E { A, B, C }; [Stable] interface F { A@0(int32 x) => (E e); };' + ) +- with self.assertRaisesRegexp(Exception, 'because it depends on S'): ++ with self.assertRaisesRegex(Exception, 'because it depends on S'): + self.ExtractTypes( + 'struct S {}; [Stable] interface F { A@0(int32 x) => (S s); };') +- with self.assertRaisesRegexp(Exception, 'because it depends on S'): ++ with self.assertRaisesRegex(Exception, 'because it depends on S'): + self.ExtractTypes( + 'struct S {}; [Stable] interface F { A@0(S s) => (bool b); };') + +- 
with self.assertRaisesRegexp(Exception, 'explicit method ordinals'): ++ with self.assertRaisesRegex(Exception, 'explicit method ordinals'): + self.ExtractTypes('[Stable] interface F { A() => (); };') +diff --git a/src/3rdparty/chromium/mojo/public/tools/mojom/version_compatibility_unittest.py b/src/3rdparty/chromium/mojo/public/tools/mojom/version_compatibility_unittest.py +index 65db4dc9c..4f8ca519b 100644 +--- a/src/3rdparty/chromium/mojo/public/tools/mojom/version_compatibility_unittest.py ++++ b/src/3rdparty/chromium/mojo/public/tools/mojom/version_compatibility_unittest.py +@@ -23,14 +23,14 @@ class VersionCompatibilityTest(MojomParserTestCase): + + checker = module.BackwardCompatibilityChecker() + compatibility_map = {} +- for name in old.keys(): ++ for name in list(old.keys()): + compatibility_map[name] = checker.IsBackwardCompatible( + new[name], old[name]) + return compatibility_map + + def assertBackwardCompatible(self, old_mojom, new_mojom): + compatibility_map = self._GetTypeCompatibilityMap(old_mojom, new_mojom) +- for name, compatible in compatibility_map.items(): ++ for name, compatible in list(compatibility_map.items()): + if not compatible: + raise AssertionError( + 'Given the old mojom:\n\n %s\n\nand the new mojom:\n\n %s\n\n' +diff --git a/src/3rdparty/chromium/net/android/tools/proxy_test_cases.py b/src/3rdparty/chromium/net/android/tools/proxy_test_cases.py +index ab93f6783..f656162ca 100755 +--- a/src/3rdparty/chromium/net/android/tools/proxy_test_cases.py ++++ b/src/3rdparty/chromium/net/android/tools/proxy_test_cases.py +@@ -285,25 +285,25 @@ class GenerateCPlusPlus: + + def Generate(self): + for test_case in test_cases: +- print ("TEST_F(ProxyConfigServiceAndroidTest, %s) {" % test_case["name"]) ++ print("TEST_F(ProxyConfigServiceAndroidTest, %s) {" % test_case["name"]) + if "description" in test_case: + self._GenerateDescription(test_case["description"]); + self._GenerateConfiguration(test_case["properties"]) + 
self._GenerateMappings(test_case["mappings"]) +- print "}" +- print "" ++ print("}") ++ print("") + + def _GenerateDescription(self, description): +- print " // %s" % description ++ print(" // %s" % description) + + def _GenerateConfiguration(self, properties): +- for key in sorted(properties.iterkeys()): +- print " AddProperty(\"%s\", \"%s\");" % (key, properties[key]) +- print " ProxySettingsChanged();" ++ for key in sorted(properties.keys()): ++ print(" AddProperty(\"%s\", \"%s\");" % (key, properties[key])) ++ print(" ProxySettingsChanged();") + + def _GenerateMappings(self, mappings): +- for url in sorted(mappings.iterkeys()): +- print " TestMapping(\"%s\", \"%s\");" % (url, mappings[url]) ++ for url in sorted(mappings.keys()): ++ print(" TestMapping(\"%s\", \"%s\");" % (url, mappings[url])) + + + class GenerateJava: +@@ -315,32 +315,32 @@ class GenerateJava: + continue + if "description" in test_case: + self._GenerateDescription(test_case["description"]); +- print " @SmallTest" +- print " @Feature({\"AndroidWebView\"})" +- print " public void test%s() throws Exception {" % test_case["name"] ++ print(" @SmallTest") ++ print(" @Feature({\"AndroidWebView\"})") ++ print(" public void test%s() throws Exception {" % test_case["name"]) + self._GenerateConfiguration(test_case["properties"]) + self._GenerateMappings(test_case["mappings"]) +- print " }" +- print "" ++ print(" }") ++ print("") + + def _GenerateDescription(self, description): +- print " /**" +- print " * %s" % description +- print " *" +- print " * @throws Exception" +- print " */" ++ print(" /**") ++ print(" * %s" % description) ++ print(" *") ++ print(" * @throws Exception") ++ print(" */") + + def _GenerateConfiguration(self, properties): +- for key in sorted(properties.iterkeys()): +- print " System.setProperty(\"%s\", \"%s\");" % ( +- key, properties[key]) ++ for key in sorted(properties.keys()): ++ print(" System.setProperty(\"%s\", \"%s\");" % ( ++ key, properties[key])) + + def 
_GenerateMappings(self, mappings): +- for url in sorted(mappings.iterkeys()): ++ for url in sorted(mappings.keys()): + mapping = mappings[url] + if 'HTTPS' in mapping: + mapping = mapping.replace('HTTPS', 'PROXY') +- print " checkMapping(\"%s\", \"%s\");" % (url, mapping) ++ print(" checkMapping(\"%s\", \"%s\");" % (url, mapping)) + + + def main(): +diff --git a/src/3rdparty/chromium/net/data/gencerts/__init__.py b/src/3rdparty/chromium/net/data/gencerts/__init__.py +index 70445c817..bce57bbd5 100755 +--- a/src/3rdparty/chromium/net/data/gencerts/__init__.py ++++ b/src/3rdparty/chromium/net/data/gencerts/__init__.py +@@ -17,7 +17,7 @@ import shutil + import subprocess + import sys + +-import openssl_conf ++from . import openssl_conf + + # Enum for the "type" of certificate that is to be created. This is used to + # select sane defaults for the .cnf file and command line flags, but they can +diff --git a/src/3rdparty/chromium/net/data/ssl/root_stores/update_root_stores.py b/src/3rdparty/chromium/net/data/ssl/root_stores/update_root_stores.py +index 8aca424fc..9c1908229 100755 +--- a/src/3rdparty/chromium/net/data/ssl/root_stores/update_root_stores.py ++++ b/src/3rdparty/chromium/net/data/ssl/root_stores/update_root_stores.py +@@ -80,7 +80,7 @@ def ClangFormat(filename): + + def main(): + if len(sys.argv) > 1: +- print >>sys.stderr, 'No arguments expected!' 
++ print('No arguments expected!', file=sys.stderr) + sys.stderr.write(__doc__) + return 1 + +diff --git a/src/3rdparty/chromium/net/data/ssl/scripts/crlsetutil.py b/src/3rdparty/chromium/net/data/ssl/scripts/crlsetutil.py +index 815fa9ca4..3f1c47694 100755 +--- a/src/3rdparty/chromium/net/data/ssl/scripts/crlsetutil.py ++++ b/src/3rdparty/chromium/net/data/ssl/scripts/crlsetutil.py +@@ -91,7 +91,7 @@ def _parse_asn1_element(der_bytes): + if length & 0x80: + num_length_bytes = length & 0x7f + length = 0 +- for i in xrange(2, 2 + num_length_bytes): ++ for i in range(2, 2 + num_length_bytes): + length <<= 8 + length += ord(der_bytes[i]) + header_length = 2 + num_length_bytes +@@ -263,14 +263,14 @@ def main(): + pem_cert_file_to_serial(issued_cert_file) + for issued_cert_file in issued_certs + ] +- for pem_file, issued_certs in config.get('BlockedByHash', {}).iteritems() ++ for pem_file, issued_certs in config.get('BlockedByHash', {}).items() + } + limited_subjects = { + pem_cert_file_to_subject_hash(pem_file).encode('base64').strip(): [ + pem_cert_file_to_spki_hash(filename).encode('base64').strip() + for filename in allowed_pems + ] +- for pem_file, allowed_pems in config.get('LimitedSubjects', {}).iteritems() ++ for pem_file, allowed_pems in config.get('LimitedSubjects', {}).items() + } + known_interception_spkis = [ + pem_cert_file_to_spki_hash(pem_file).encode('base64').strip() +@@ -291,7 +291,7 @@ def main(): + header = json.dumps(header_json) + outfile.write(struct.pack('''' + + +-class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): ++class RequestHandler(http.server.BaseHTTPRequestHandler): + keep_running = True + local_ip = '' + port = 0 +@@ -76,7 +76,7 @@ class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): + self.path[6:])) + return + +- params = urlparse.parse_qs(urlparse.urlparse(self.path).query) ++ params = urllib.parse.parse_qs(urllib.parse.urlparse(self.path).query) + + if not params or not 'code' in params or params['code'][0] == 
'200': + self.send_response(200) +@@ -92,13 +92,13 @@ class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): + + def main(): + if len(sys.argv) != 3: +- print "Usage: %s LOCAL_IP PORT" % sys.argv[0] ++ print("Usage: %s LOCAL_IP PORT" % sys.argv[0]) + sys.exit(1) + RequestHandler.local_ip = sys.argv[1] + port = int(sys.argv[2]) + RequestHandler.port = port +- print "To stop the server, go to http://localhost:%d/quitquitquit" % port +- httpd = BaseHTTPServer.HTTPServer(('', port), RequestHandler) ++ print("To stop the server, go to http://localhost:%d/quitquitquit" % port) ++ httpd = http.server.HTTPServer(('', port), RequestHandler) + while RequestHandler.keep_running: + httpd.handle_request() + +diff --git a/src/3rdparty/chromium/net/tools/testserver/echo_message.py b/src/3rdparty/chromium/net/tools/testserver/echo_message.py +index b2f7b04e8..056ec49b6 100644 +--- a/src/3rdparty/chromium/net/tools/testserver/echo_message.py ++++ b/src/3rdparty/chromium/net/tools/testserver/echo_message.py +@@ -26,7 +26,7 @@ __author__ = 'rtenneti@google.com (Raman Tenneti)' + + + from itertools import cycle +-from itertools import izip ++ + import random + + +@@ -261,7 +261,7 @@ def Crypt(payload, key): + Returns: + An encoded/decoded string. 
+ """ +- return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in izip(payload, cycle(key))) ++ return ''.join(chr(ord(x) ^ ord(y)) for (x, y) in zip(payload, cycle(key))) + + + def Checksum(payload, payload_size): +diff --git a/src/3rdparty/chromium/net/tools/testserver/minica.py b/src/3rdparty/chromium/net/tools/testserver/minica.py +index 95d0287dc..d17743c8d 100644 +--- a/src/3rdparty/chromium/net/tools/testserver/minica.py ++++ b/src/3rdparty/chromium/net/tools/testserver/minica.py +@@ -406,7 +406,7 @@ def MakeOCSPResponse( + single_responses = [ + MakeOCSPSingleResponse(issuer_name_hash, issuer_key_hash, serial, + ocsp_state, ocsp_date) +- for ocsp_state, ocsp_date in itertools.izip(ocsp_states, ocsp_dates) ++ for ocsp_state, ocsp_date in zip(ocsp_states, ocsp_dates) + ] + + basic_resp_data_der = asn1.ToDER(asn1.SEQUENCE([ +@@ -557,13 +557,13 @@ if __name__ == '__main__': + + der_root = MakeCertificate(ROOT_CN, ROOT_CN, 1, ROOT_KEY, ROOT_KEY, + is_ca=True, path_len=1) +- print 'ocsp-test-root.pem:' +- print DERToPEM(der_root) ++ print('ocsp-test-root.pem:') ++ print(DERToPEM(der_root)) + +- print +- print 'kOCSPTestCertFingerprint:' +- print bin_to_array(hashlib.sha1(der_root).digest()) ++ print() ++ print('kOCSPTestCertFingerprint:') ++ print(bin_to_array(hashlib.sha1(der_root).digest())) + +- print +- print 'kOCSPTestCertSPKI:' +- print bin_to_array(crlsetutil.der_cert_to_spki_hash(der_root)) ++ print() ++ print('kOCSPTestCertSPKI:') ++ print(bin_to_array(crlsetutil.der_cert_to_spki_hash(der_root))) +diff --git a/src/3rdparty/chromium/net/tools/testserver/testserver.py b/src/3rdparty/chromium/net/tools/testserver/testserver.py +index b21be689e..f60918898 100755 +--- a/src/3rdparty/chromium/net/tools/testserver/testserver.py ++++ b/src/3rdparty/chromium/net/tools/testserver/testserver.py +@@ -15,7 +15,7 @@ to a pem file containing the certificate and private key that should be used. 
+ """ + + import base64 +-import BaseHTTPServer ++import http.server + import cgi + import hashlib + import logging +@@ -26,14 +26,14 @@ import random + import re + import select + import socket +-import SocketServer ++import socketserver + import ssl + import struct + import sys + import threading + import time +-import urllib +-import urlparse ++import urllib.request, urllib.parse, urllib.error ++import urllib.parse + import zlib + + BASE_DIR = os.path.dirname(os.path.abspath(__file__)) +@@ -123,7 +123,7 @@ class HTTPServer(testserver_base.ClientRestrictingServerMixIn, + + pass + +-class ThreadingHTTPServer(SocketServer.ThreadingMixIn, ++class ThreadingHTTPServer(socketserver.ThreadingMixIn, + HTTPServer): + """This variant of HTTPServer creates a new thread for every connection. It + should only be used with handlers that are known to be threadsafe.""" +@@ -132,7 +132,7 @@ class ThreadingHTTPServer(SocketServer.ThreadingMixIn, + + class OCSPServer(testserver_base.ClientRestrictingServerMixIn, + testserver_base.BrokenPipeHandlerMixIn, +- BaseHTTPServer.HTTPServer): ++ http.server.HTTPServer): + """This is a specialization of HTTPServer that serves an + OCSP response""" + +@@ -248,8 +248,8 @@ class HTTPSServer(tlslite.api.TLSSocketServerMixIn, + except tlslite.api.TLSAbruptCloseError: + # Ignore abrupt close. 
+ return True +- except tlslite.api.TLSError, error: +- print "Handshake failure:", str(error) ++ except tlslite.api.TLSError as error: ++ print("Handshake failure:", str(error)) + return False + + +@@ -261,13 +261,13 @@ class FTPServer(testserver_base.ClientRestrictingServerMixIn, + + + class TCPEchoServer(testserver_base.ClientRestrictingServerMixIn, +- SocketServer.TCPServer): ++ socketserver.TCPServer): + """A TCP echo server that echoes back what it has received.""" + + def server_bind(self): + """Override server_bind to store the server name.""" + +- SocketServer.TCPServer.server_bind(self) ++ socketserver.TCPServer.server_bind(self) + host, port = self.socket.getsockname()[:2] + self.server_name = socket.getfqdn(host) + self.server_port = port +@@ -281,13 +281,13 @@ class TCPEchoServer(testserver_base.ClientRestrictingServerMixIn, + + + class UDPEchoServer(testserver_base.ClientRestrictingServerMixIn, +- SocketServer.UDPServer): ++ socketserver.UDPServer): + """A UDP echo server that echoes back what it has received.""" + + def server_bind(self): + """Override server_bind to store the server name.""" + +- SocketServer.UDPServer.server_bind(self) ++ socketserver.UDPServer.server_bind(self) + host, port = self.socket.getsockname()[:2] + self.server_name = socket.getfqdn(host) + self.server_port = port +@@ -684,7 +684,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + if not self._ShouldHandleRequest("/echo"): + return False + +- _, _, _, _, query, _ = urlparse.urlparse(self.path) ++ _, _, _, _, query, _ = urllib.parse.urlparse(self.path) + query_params = cgi.parse_qs(query, True) + if 'status' in query_params: + self.send_response(int(query_params['status'][0])) +@@ -760,7 +760,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + + # Since the data can be binary, we encode them by base64. 
+ post_multipart_base64_encoded = {} +- for field, values in post_multipart.items(): ++ for field, values in list(post_multipart.items()): + post_multipart_base64_encoded[field] = [base64.b64encode(value) + for value in values] + +@@ -871,7 +871,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + if self.command == 'POST' or self.command == 'PUT' : + self.ReadRequestBody() + +- _, _, url_path, _, query, _ = urlparse.urlparse(self.path) ++ _, _, url_path, _, query, _ = urllib.parse.urlparse(self.path) + + if not query in ('C', 'U', 'S', 'M', 'L'): + return False +@@ -883,7 +883,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + file_path = os.path.join(file_path, 'index.html') + + if not os.path.isfile(file_path): +- print "File not found " + sub_path + " full path:" + file_path ++ print("File not found " + sub_path + " full path:" + file_path) + self.send_error(404) + return True + +@@ -930,7 +930,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + def PostOnlyFileHandler(self): + """This handler sends the contents of the requested file on a POST.""" + +- prefix = urlparse.urljoin(self.server.file_root_url, 'post/') ++ prefix = urllib.parse.urljoin(self.server.file_root_url, 'post/') + if not self.path.startswith(prefix): + return False + return self._FileHandlerHelper(prefix) +@@ -941,7 +941,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + # Consume a request body if present. 
+ request_body = self.ReadRequestBody() + +- _, _, url_path, _, query, _ = urlparse.urlparse(self.path) ++ _, _, url_path, _, query, _ = urllib.parse.urlparse(self.path) + query_dict = cgi.parse_qs(query) + + expected_body = query_dict.get('expected_body', []) +@@ -967,7 +967,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + file_path = os.path.join(file_path, 'index.html') + + if not os.path.isfile(file_path): +- print "File not found " + sub_path + " full path:" + file_path ++ print("File not found " + sub_path + " full path:" + file_path) + self.send_error(404) + return True + +@@ -1066,7 +1066,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + if not self._ShouldHandleRequest("/expect-and-set-cookie"): + return False + +- _, _, _, _, query, _ = urlparse.urlparse(self.path) ++ _, _, _, _, query, _ = urllib.parse.urlparse(self.path) + query_dict = cgi.parse_qs(query) + cookies = set() + if 'Cookie' in self.headers: +@@ -1101,7 +1101,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + self.send_response(200) + self.send_header('Content-Type', 'text/html') + for header_value in headers_values: +- header_value = urllib.unquote(header_value) ++ header_value = urllib.parse.unquote(header_value) + (key, value) = header_value.split(': ', 1) + self.send_header(key, value) + self.end_headers() +@@ -1121,7 +1121,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + realm = 'testrealm' + set_cookie_if_challenged = False + +- _, _, url_path, _, query, _ = urlparse.urlparse(self.path) ++ _, _, url_path, _, query, _ = urllib.parse.urlparse(self.path) + query_params = cgi.parse_qs(query, True) + if 'set-cookie-if-challenged' in query_params: + set_cookie_if_challenged = True +@@ -1139,7 +1139,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + username, password = re.findall(r'([^:]+):(\S+)', userpass)[0] + if password != expected_password: + raise Exception('wrong password') +- except Exception, e: ++ except Exception as 
e: + # Authentication failed. + self.send_response(401) + self.send_header('WWW-Authenticate', 'Basic realm="%s"' % realm) +@@ -1265,7 +1265,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + + if pairs['response'] != response: + raise Exception('wrong password') +- except Exception, e: ++ except Exception as e: + # Authentication failed. + self.send_response(401) + hdr = ('Digest ' +@@ -1387,7 +1387,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + if query_char < 0 or len(self.path) <= query_char + 1: + self.sendRedirectHelp(test_name) + return True +- dest = urllib.unquote(self.path[query_char + 1:]) ++ dest = urllib.parse.unquote(self.path[query_char + 1:]) + + self.send_response(301) # moved permanently + self.send_header('Location', dest) +@@ -1408,7 +1408,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + if not self._ShouldHandleRequest(test_name): + return False + +- params = urllib.unquote(self.path[(len(test_name) + 1):]) ++ params = urllib.parse.unquote(self.path[(len(test_name) + 1):]) + slash = params.find('/') + if slash < 0: + self.sendRedirectHelp(test_name) +@@ -1440,7 +1440,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + if query_char < 0 or len(self.path) <= query_char + 1: + self.sendRedirectHelp(test_name) + return True +- dest = urllib.unquote(self.path[query_char + 1:]) ++ dest = urllib.parse.unquote(self.path[query_char + 1:]) + + self.send_response(200) + self.send_header('Content-Type', 'text/html') +@@ -1484,7 +1484,7 @@ class TestPageHandler(testserver_base.BasePageHandler): + self.end_headers() + + # Write ~26K of data, in 1350 byte chunks +- for i in xrange(20): ++ for i in range(20): + self.wfile.write('*' * 1350) + self.wfile.flush() + return True +@@ -1639,7 +1639,7 @@ class OCSPHandler(testserver_base.BasePageHandler): + response = self.ocsp_response_intermediate + else: + return False +- print 'handling ocsp request' ++ print('handling ocsp request') + self.send_response(200) + 
self.send_header('Content-Type', 'application/ocsp-response') + self.send_header('Content-Length', str(len(response))) +@@ -1650,7 +1650,7 @@ class OCSPHandler(testserver_base.BasePageHandler): + def CaIssuersResponse(self): + if not self._ShouldHandleRequest("/ca_issuers"): + return False +- print 'handling ca_issuers request' ++ print('handling ca_issuers request') + self.send_response(200) + self.send_header('Content-Type', 'application/pkix-cert') + self.send_header('Content-Length', str(len(self.ca_issuers_response))) +@@ -1659,7 +1659,7 @@ class OCSPHandler(testserver_base.BasePageHandler): + self.wfile.write(self.ca_issuers_response) + + +-class TCPEchoHandler(SocketServer.BaseRequestHandler): ++class TCPEchoHandler(socketserver.BaseRequestHandler): + """The RequestHandler class for TCP echo server. + + It is instantiated once per connection to the server, and overrides the +@@ -1682,7 +1682,7 @@ class TCPEchoHandler(SocketServer.BaseRequestHandler): + self.request.send(return_data) + + +-class UDPEchoHandler(SocketServer.BaseRequestHandler): ++class UDPEchoHandler(socketserver.BaseRequestHandler): + """The RequestHandler class for UDP echo server. + + It is instantiated once per connection to the server, and overrides the +@@ -1705,7 +1705,7 @@ class UDPEchoHandler(SocketServer.BaseRequestHandler): + request_socket.sendto(return_data, self.client_address) + + +-class ProxyRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): ++class ProxyRequestHandler(http.server.BaseHTTPRequestHandler): + """A request handler that behaves as a proxy server. Only CONNECT, GET and + HEAD methods are supported. 
+ """ +@@ -1737,7 +1737,7 @@ class ProxyRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): + other.send(received) + + def _do_common_method(self): +- url = urlparse.urlparse(self.path) ++ url = urllib.parse.urlparse(self.path) + port = url.port + if not port: + if url.scheme == 'http': +@@ -1962,7 +1962,7 @@ class ServerRunner(testserver_base.TestServerRunner): + pem_cert_and_key = file(self.options.cert_and_key_file, 'r').read() + elif self.options.aia_intermediate: + self.__ocsp_server = OCSPServer((host, 0), OCSPHandler) +- print ('AIA server started on %s:%d...' % ++ print('AIA server started on %s:%d...' % + (host, self.__ocsp_server.server_port)) + + ocsp_server_port = self.__ocsp_server.server_port +@@ -1984,7 +1984,7 @@ class ServerRunner(testserver_base.TestServerRunner): + else: + # generate a new certificate and run an OCSP server for it. + self.__ocsp_server = OCSPServer((host, 0), OCSPHandler) +- print ('OCSP server started on %s:%d...' % ++ print('OCSP server started on %s:%d...' % + (host, self.__ocsp_server.server_port)) + + ocsp_states, ocsp_dates, ocsp_produced = self.__parse_ocsp_options( +@@ -2063,12 +2063,12 @@ class ServerRunner(testserver_base.TestServerRunner): + self.options.simulate_tls13_downgrade, + self.options.simulate_tls12_downgrade, + self.options.tls_max_version) +- print 'HTTPS server started on https://%s:%d...' % \ +- (host, server.server_port) ++ print('HTTPS server started on https://%s:%d...' % \ ++ (host, server.server_port)) + else: + server = HTTPServer((host, port), TestPageHandler) +- print 'HTTP server started on http://%s:%d...' % \ +- (host, server.server_port) ++ print('HTTP server started on http://%s:%d...' 
% \ ++ (host, server.server_port)) + + server.data_dir = self.__make_data_dir() + server.file_root_url = self.options.file_root_url +@@ -2101,11 +2101,11 @@ class ServerRunner(testserver_base.TestServerRunner): + 'specified trusted client CA file not found: ' + + self.options.ssl_client_ca[0] + ' exiting...') + websocket_options.tls_client_ca = self.options.ssl_client_ca[0] +- print 'Trying to start websocket server on %s://%s:%d...' % \ +- (scheme, websocket_options.server_host, websocket_options.port) ++ print('Trying to start websocket server on %s://%s:%d...' % \ ++ (scheme, websocket_options.server_host, websocket_options.port)) + server = WebSocketServer(websocket_options) +- print 'WebSocket server started on %s://%s:%d...' % \ +- (scheme, host, server.server_port) ++ print('WebSocket server started on %s://%s:%d...' % \ ++ (scheme, host, server.server_port)) + server_data['port'] = server.server_port + websocket_options.use_basic_auth = self.options.ws_basic_auth + elif self.options.server_type == SERVER_TCP_ECHO: +@@ -2113,26 +2113,26 @@ class ServerRunner(testserver_base.TestServerRunner): + # message. + random.seed() + server = TCPEchoServer((host, port), TCPEchoHandler) +- print 'Echo TCP server started on port %d...' % server.server_port ++ print('Echo TCP server started on port %d...' % server.server_port) + server_data['port'] = server.server_port + elif self.options.server_type == SERVER_UDP_ECHO: + # Used for generating the key (randomly) that encodes the "echo request" + # message. + random.seed() + server = UDPEchoServer((host, port), UDPEchoHandler) +- print 'Echo UDP server started on port %d...' % server.server_port ++ print('Echo UDP server started on port %d...' 
% server.server_port) + server_data['port'] = server.server_port + elif self.options.server_type == SERVER_PROXY: + ProxyRequestHandler.redirect_connect_to_localhost = \ + self.options.redirect_connect_to_localhost + server = ThreadingHTTPServer((host, port), ProxyRequestHandler) +- print 'Proxy server started on port %d...' % server.server_port ++ print('Proxy server started on port %d...' % server.server_port) + server_data['port'] = server.server_port + elif self.options.server_type == SERVER_BASIC_AUTH_PROXY: + ProxyRequestHandler.redirect_connect_to_localhost = \ + self.options.redirect_connect_to_localhost + server = ThreadingHTTPServer((host, port), BasicAuthProxyRequestHandler) +- print 'BasicAuthProxy server started on port %d...' % server.server_port ++ print('BasicAuthProxy server started on port %d...' % server.server_port) + server_data['port'] = server.server_port + elif self.options.server_type == SERVER_FTP: + my_data_dir = self.__make_data_dir() +@@ -2158,7 +2158,7 @@ class ServerRunner(testserver_base.TestServerRunner): + # Instantiate FTP server class and listen to address:port + server = pyftpdlib.ftpserver.FTPServer((host, port), ftp_handler) + server_data['port'] = server.socket.getsockname()[1] +- print 'FTP server started on port %d...' % server_data['port'] ++ print('FTP server started on port %d...' 
% server_data['port']) + else: + raise testserver_base.OptionError('unknown server type' + + self.options.server_type) +diff --git a/src/3rdparty/chromium/net/tools/update_ios_bundle_data.py b/src/3rdparty/chromium/net/tools/update_ios_bundle_data.py +index 32cf0fd27..d3d6955c3 100755 +--- a/src/3rdparty/chromium/net/tools/update_ios_bundle_data.py ++++ b/src/3rdparty/chromium/net/tools/update_ios_bundle_data.py +@@ -109,7 +109,7 @@ def write_string_to_file(data, path): + + + def fatal(message): +- print "FATAL: " + message ++ print("FATAL: " + message) + sys.exit(1) + + +@@ -147,7 +147,7 @@ def main(): + net_unittest_bundle_data_globs) + + write_string_to_file(data, path) +- print "Wrote %s" % path ++ print("Wrote %s" % path) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/ppapi/c/documentation/doxy_cleanup.py b/src/3rdparty/chromium/ppapi/c/documentation/doxy_cleanup.py +index be94342d4..baa6a20da 100755 +--- a/src/3rdparty/chromium/ppapi/c/documentation/doxy_cleanup.py ++++ b/src/3rdparty/chromium/ppapi/c/documentation/doxy_cleanup.py +@@ -8,7 +8,7 @@ + that they are suitable for publication on a Google documentation site. + ''' + +-from __future__ import print_function ++ + + import optparse + import os +@@ -60,7 +60,7 @@ class HTMLFixer(object): + for tag in self.soup.findAll('tr'): + if tag.td and tag.td.h2 and tag.td.h2.a and tag.td.h2.a['name']: + #tag['id'] = tag.td.h2.a['name'] +- tag.string = tag.td.h2.a.next ++ tag.string = tag.td.h2.a.__next__ + tag.name = 'h2' + table_headers.append(tag) + +diff --git a/src/3rdparty/chromium/ppapi/cpp/documentation/doxy_cleanup.py b/src/3rdparty/chromium/ppapi/cpp/documentation/doxy_cleanup.py +index fbbc2f645..6cd7ed39e 100755 +--- a/src/3rdparty/chromium/ppapi/cpp/documentation/doxy_cleanup.py ++++ b/src/3rdparty/chromium/ppapi/cpp/documentation/doxy_cleanup.py +@@ -7,7 +7,7 @@ + that they are suitable for publication on a Google documentation site. 
+ ''' + +-from __future__ import print_function ++ + + import optparse + import os +@@ -59,7 +59,7 @@ class HTMLFixer(object): + for tag in self.soup.findAll('tr'): + if tag.td and tag.td.h2 and tag.td.h2.a and tag.td.h2.a['name']: + #tag['id'] = tag.td.h2.a['name'] +- tag.string = tag.td.h2.a.next ++ tag.string = tag.td.h2.a.__next__ + tag.name = 'h2' + table_headers.append(tag) + +diff --git a/src/3rdparty/chromium/ppapi/generate_ppapi_include_tests.py b/src/3rdparty/chromium/ppapi/generate_ppapi_include_tests.py +index f690f3eac..b1995c5b1 100755 +--- a/src/3rdparty/chromium/ppapi/generate_ppapi_include_tests.py ++++ b/src/3rdparty/chromium/ppapi/generate_ppapi_include_tests.py +@@ -17,7 +17,7 @@ These tests are checked in to SVN. + # tests to some 'generated' area, and remove them from version + # control. + +-from __future__ import print_function ++ + + import re + import os +diff --git a/src/3rdparty/chromium/ppapi/generate_ppapi_size_checks.py b/src/3rdparty/chromium/ppapi/generate_ppapi_size_checks.py +index a2b8e7db7..df5d02fe7 100755 +--- a/src/3rdparty/chromium/ppapi/generate_ppapi_size_checks.py ++++ b/src/3rdparty/chromium/ppapi/generate_ppapi_size_checks.py +@@ -7,7 +7,7 @@ + have appropriate size checking. + """ + +-from __future__ import print_function ++ + + import optparse + import os +@@ -130,7 +130,7 @@ class FilePatch(object): + # contain any number of lines (0 or more) delimited by carriage returns. + for linenum_to_delete in self.linenums_to_delete: + file_lines[linenum_to_delete] = ""; +- for linenum, sourcelines in self.lines_to_add.items(): ++ for linenum, sourcelines in list(self.lines_to_add.items()): + # Sort the lines we're adding so we get relatively consistent results. + sourcelines.sort() + # Prepend the new lines. When we output +@@ -331,7 +331,7 @@ def main(argv): + # their structure. If we find types which could easily be consistent but + # aren't, spit out an error and exit. 
+ types_independent = {} +- for typename, typeinfo32 in types32.items(): ++ for typename, typeinfo32 in list(types32.items()): + if (typename in types64): + typeinfo64 = types64[typename] + if (typeinfo64.size == typeinfo32.size): +@@ -377,7 +377,7 @@ def main(argv): + # to be arch-independent has changed to now be arch-dependent (e.g., because + # a pointer was added), and we want to delete the old check in that case. + for name, typeinfo in \ +- types_independent.items() + types32.items() + types64.items(): ++ list(types_independent.items()) + list(types32.items()) + list(types64.items()): + if IsMacroDefinedName(name): + sourcefile = typeinfo.source_location.filename + if sourcefile not in file_patches: +@@ -387,7 +387,7 @@ def main(argv): + + # Add a compile-time assertion for each type whose size is independent of + # architecture. These assertions go immediately after the class definition. +- for name, typeinfo in types_independent.items(): ++ for name, typeinfo in list(types_independent.items()): + # Ignore dummy types that were defined by macros and also ignore types that + # are 0 bytes (i.e., typedefs to void). + if not IsMacroDefinedName(name) and typeinfo.size > 0: +@@ -405,17 +405,17 @@ def main(argv): + + # Apply our patches. This actually edits the files containing the definitions + # for the types in types_independent. +- for filename, patch in file_patches.items(): ++ for filename, patch in list(file_patches.items()): + patch.Apply() + + # Write out a file of checks for 32-bit architectures and a separate file for + # 64-bit architectures. These only have checks for types that are + # architecture-dependent. 
+ c_source_root = os.path.join(options.ppapi_root, "tests") +- WriteArchSpecificCode(types32.values(), ++ WriteArchSpecificCode(list(types32.values()), + c_source_root, + "arch_dependent_sizes_32.h") +- WriteArchSpecificCode(types64.values(), ++ WriteArchSpecificCode(list(types64.values()), + c_source_root, + "arch_dependent_sizes_64.h") + +diff --git a/src/3rdparty/chromium/ppapi/generators/generator.py b/src/3rdparty/chromium/ppapi/generators/generator.py +index 702bc1b7e..352fe319a 100755 +--- a/src/3rdparty/chromium/ppapi/generators/generator.py ++++ b/src/3rdparty/chromium/ppapi/generators/generator.py +@@ -3,7 +3,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import os + import sys +diff --git a/src/3rdparty/chromium/ppapi/generators/idl_ast.py b/src/3rdparty/chromium/ppapi/generators/idl_ast.py +index 19be3d48a..f3724150c 100644 +--- a/src/3rdparty/chromium/ppapi/generators/idl_ast.py ++++ b/src/3rdparty/chromium/ppapi/generators/idl_ast.py +@@ -4,7 +4,7 @@ + + """Nodes for PPAPI IDL AST.""" + +-from __future__ import print_function ++ + + from idl_namespace import IDLNamespace + from idl_node import IDLNode +diff --git a/src/3rdparty/chromium/ppapi/generators/idl_c_header.py b/src/3rdparty/chromium/ppapi/generators/idl_c_header.py +index d597f7121..f40eb629c 100755 +--- a/src/3rdparty/chromium/ppapi/generators/idl_c_header.py ++++ b/src/3rdparty/chromium/ppapi/generators/idl_c_header.py +@@ -5,7 +5,7 @@ + + """ Generator for C style prototypes and definitions """ + +-from __future__ import print_function ++ + + import glob + import os +diff --git a/src/3rdparty/chromium/ppapi/generators/idl_c_proto.py b/src/3rdparty/chromium/ppapi/generators/idl_c_proto.py +index 8404b79e7..31051d039 100755 +--- a/src/3rdparty/chromium/ppapi/generators/idl_c_proto.py ++++ b/src/3rdparty/chromium/ppapi/generators/idl_c_proto.py +@@ -5,7 +5,7 @@ + + """ Generator 
for C style prototypes and definitions """ + +-from __future__ import print_function ++ + + import glob + import os +@@ -665,7 +665,7 @@ class CGen(object): + def Copyright(self, node, cpp_style=False): + lines = node.GetName().split('\n') + if cpp_style: +- return '//' + '\n//'.join(filter(lambda f: f != '', lines)) + '\n' ++ return '//' + '\n//'.join([f for f in lines if f != '']) + '\n' + return CommentLines(lines) + + +diff --git a/src/3rdparty/chromium/ppapi/generators/idl_diff.py b/src/3rdparty/chromium/ppapi/generators/idl_diff.py +index 7fd83cb8d..b7870cccf 100755 +--- a/src/3rdparty/chromium/ppapi/generators/idl_diff.py ++++ b/src/3rdparty/chromium/ppapi/generators/idl_diff.py +@@ -3,7 +3,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import glob + import os +@@ -49,7 +49,7 @@ class Change(object): + print('src: >>%s<<' % line) + for line in self.now: + print('gen: >>%s<<' % line) +- print ++ print() + + # + # IsCopyright +diff --git a/src/3rdparty/chromium/ppapi/generators/idl_generator.py b/src/3rdparty/chromium/ppapi/generators/idl_generator.py +index 047710049..a254b1d7f 100755 +--- a/src/3rdparty/chromium/ppapi/generators/idl_generator.py ++++ b/src/3rdparty/chromium/ppapi/generators/idl_generator.py +@@ -3,7 +3,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import print_function ++ + + import sys + +diff --git a/src/3rdparty/chromium/ppapi/generators/idl_lexer.py b/src/3rdparty/chromium/ppapi/generators/idl_lexer.py +index 1c9492eb4..3a26771ff 100755 +--- a/src/3rdparty/chromium/ppapi/generators/idl_lexer.py ++++ b/src/3rdparty/chromium/ppapi/generators/idl_lexer.py +@@ -16,7 +16,7 @@ + # PLY can be found at: + # http://www.dabeaz.com/ply/ + +-from __future__ import print_function ++ + + import os.path + import re +diff --git a/src/3rdparty/chromium/ppapi/generators/idl_namespace.py b/src/3rdparty/chromium/ppapi/generators/idl_namespace.py +index 5630ae282..05435ffeb 100755 +--- a/src/3rdparty/chromium/ppapi/generators/idl_namespace.py ++++ b/src/3rdparty/chromium/ppapi/generators/idl_namespace.py +@@ -10,7 +10,7 @@ This file defines the behavior of the AST namespace which allows for resolving + a symbol as one or more AST nodes given a release or range of releases. + """ + +-from __future__ import print_function ++ + + import sys + +diff --git a/src/3rdparty/chromium/ppapi/generators/idl_propertynode.py b/src/3rdparty/chromium/ppapi/generators/idl_propertynode.py +index 9152ed719..f2224eda0 100755 +--- a/src/3rdparty/chromium/ppapi/generators/idl_propertynode.py ++++ b/src/3rdparty/chromium/ppapi/generators/idl_propertynode.py +@@ -46,7 +46,7 @@ class IDLPropertyNode(object): + return self.property_map.get(name, None) + + def GetPropertyList(self): +- return self.property_map.keys() ++ return list(self.property_map.keys()) + + # + # Testing functions +diff --git a/src/3rdparty/chromium/ppapi/generators/idl_release.py b/src/3rdparty/chromium/ppapi/generators/idl_release.py +index ce95ec87a..02150a1e6 100755 +--- a/src/3rdparty/chromium/ppapi/generators/idl_release.py ++++ b/src/3rdparty/chromium/ppapi/generators/idl_release.py +@@ -10,7 +10,7 @@ This file defines the behavior of the AST namespace which allows for resolving + a symbol as one or more AST nodes given a Release or range of Releases. 
+ """ + +-from __future__ import print_function ++ + + import sys + +diff --git a/src/3rdparty/chromium/ppapi/generators/idl_tests.py b/src/3rdparty/chromium/ppapi/generators/idl_tests.py +index 7b732bcc3..9ea951b44 100755 +--- a/src/3rdparty/chromium/ppapi/generators/idl_tests.py ++++ b/src/3rdparty/chromium/ppapi/generators/idl_tests.py +@@ -5,7 +5,7 @@ + + """ Test runner for IDL Generator changes """ + +-from __future__ import print_function ++ + + import subprocess + import sys +diff --git a/src/3rdparty/chromium/ppapi/generators/idl_thunk.py b/src/3rdparty/chromium/ppapi/generators/idl_thunk.py +index ff7bdfd16..72a9b7619 100755 +--- a/src/3rdparty/chromium/ppapi/generators/idl_thunk.py ++++ b/src/3rdparty/chromium/ppapi/generators/idl_thunk.py +@@ -5,7 +5,7 @@ + + """ Generator for C++ style thunks """ + +-from __future__ import print_function ++ + + import glob + import os +@@ -405,8 +405,7 @@ def _IsNewestMember(member, members, releases): + """ + build_list = member.GetUniqueReleases(releases) + release = build_list[0] # Pick the oldest release. +- same_name_siblings = filter( +- lambda n: str(n) == str(member) and n != member, members) ++ same_name_siblings = [n for n in members if str(n) == str(member) and n != member] + + for s in same_name_siblings: + sibling_build_list = s.GetUniqueReleases(releases) +@@ -504,7 +503,7 @@ class TGen(GeneratorByFile): + for child in members: + build_list = child.GetUniqueReleases(releases) + # We have to filter out releases this node isn't in. 
+- build_list = filter(lambda r: child.InReleases([r]), build_list) ++ build_list = [r for r in build_list if child.InReleases([r])] + if len(build_list) == 0: + continue + release = build_list[-1] +diff --git a/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browser_tester.py b/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browser_tester.py +index c9b7ce660..872305f90 100755 +--- a/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browser_tester.py ++++ b/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browser_tester.py +@@ -3,16 +3,16 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import glob + import optparse + import os.path + import socket + import sys +-import thread ++import _thread + import time +-import urllib ++import urllib.request, urllib.parse, urllib.error + + # Allow the import of third party modules + script_dir = os.path.dirname(os.path.abspath(__file__)) +@@ -228,7 +228,7 @@ def RunTestsOnce(url, options): + file_mapping = dict(options.map_files) + for filename in options.files: + file_mapping[os.path.basename(filename)] = filename +- for _, real_path in file_mapping.items(): ++ for _, real_path in list(file_mapping.items()): + if not os.path.exists(real_path): + raise AssertionError('\'%s\' does not exist.' % real_path) + mime_types = {} +@@ -254,7 +254,7 @@ def RunTestsOnce(url, options): + + full_url = 'http://%s:%d/%s' % (host, port, url) + if len(options.test_args) > 0: +- full_url += '?' + urllib.urlencode(options.test_args) ++ full_url += '?' 
+ urllib.parse.urlencode(options.test_args) + browser.Run(full_url, host, port) + server.TestingBegun(0.125) + +@@ -263,7 +263,7 @@ def RunTestsOnce(url, options): + def Serve(): + while server.test_in_progress or options.interactive: + server.handle_request() +- thread.start_new_thread(Serve, ()) ++ _thread.start_new_thread(Serve, ()) + + tool_failed = False + time_started = time.time() +diff --git a/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browsertester/browserlauncher.py b/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browsertester/browserlauncher.py +index dd94434a1..7296922fc 100755 +--- a/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browsertester/browserlauncher.py ++++ b/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browsertester/browserlauncher.py +@@ -3,7 +3,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import os.path + import re +@@ -11,9 +11,9 @@ import shutil + import sys + import tempfile + import time +-import urlparse ++import urllib.parse + +-import browserprocess ++from . 
import browserprocess + + class LaunchFailure(Exception): + pass +@@ -195,7 +195,7 @@ class BrowserLauncher(object): + self.options.nacl_exe_stdout, True) + self.SetStandardStream(env, 'NACL_EXE_STDERR', + self.options.nacl_exe_stderr, True) +- print('ENV:', ' '.join(['='.join(pair) for pair in env.items()])) ++ print('ENV:', ' '.join(['='.join(pair) for pair in list(env.items())])) + print('LAUNCHING: %s' % ' '.join(cmd)) + sys.stdout.flush() + self.browser_process = RunCommand(cmd, env=env) +diff --git a/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browsertester/browserprocess.py b/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browsertester/browserprocess.py +index d012eb213..b75308625 100755 +--- a/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browsertester/browserprocess.py ++++ b/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browsertester/browserprocess.py +@@ -3,7 +3,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import os + import signal +diff --git a/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browsertester/server.py b/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browsertester/server.py +index 454f6ee28..a64c559ea 100644 +--- a/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browsertester/server.py ++++ b/src/3rdparty/chromium/ppapi/native_client/tools/browser_tester/browsertester/server.py +@@ -2,25 +2,25 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-import BaseHTTPServer ++import http.server + import cgi + import mimetypes + import os + import os.path + import posixpath +-import SimpleHTTPServer +-import SocketServer ++import http.server ++import socketserver + import threading + import time +-import urllib +-import urlparse ++import urllib.request, urllib.parse, urllib.error ++import urllib.parse + +-class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): ++class RequestHandler(http.server.SimpleHTTPRequestHandler): + + def NormalizePath(self, path): + path = path.split('?', 1)[0] + path = path.split('#', 1)[0] +- path = posixpath.normpath(urllib.unquote(path)) ++ path = posixpath.normpath(urllib.parse.unquote(path)) + words = path.split('/') + + bad = set((os.curdir, os.pardir, '')) +@@ -84,7 +84,7 @@ class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): + + def HandleRPC(self, name, query): + kargs = {} +- for k, v in query.items(): ++ for k, v in list(query.items()): + assert len(v) == 1, k + kargs[k] = v[0] + +@@ -110,13 +110,13 @@ class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): + new_value_in_secs = old_value_in_secs - 360 + value = time.strftime(last_mod_format, + time.localtime(new_value_in_secs)) +- SimpleHTTPServer.SimpleHTTPRequestHandler.send_header(self, ++ http.server.SimpleHTTPRequestHandler.send_header(self, + keyword, + value) + + def do_POST(self): + # Backwards compatible - treat result as tuple without named fields. +- _, _, path, _, query, _ = urlparse.urlparse(self.path) ++ _, _, path, _, query, _ = urllib.parse.urlparse(self.path) + + self.server.listener.Log('POST %s (%s)' % (self.path, path)) + if path == '/echo': +@@ -166,7 +166,7 @@ class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): + + def do_GET(self): + # Backwards compatible - treat result as tuple without named fields. 
+- _, _, path, _, query, _ = urlparse.urlparse(self.path) ++ _, _, path, _, query, _ = urllib.parse.urlparse(self.path) + + tester = '/TESTER/' + if path.startswith(tester): +@@ -211,7 +211,7 @@ class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): + def copyfile(self, source, outputfile): + # Bandwidth values <= 0.0 are considered infinite + if self.server.bandwidth <= 0.0: +- return SimpleHTTPServer.SimpleHTTPRequestHandler.copyfile( ++ return http.server.SimpleHTTPRequestHandler.copyfile( + self, source, outputfile) + + self.server.listener.Log('Simulating %f mbps server BW' % +@@ -246,7 +246,7 @@ class RequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): + # helps reduce the chance this will happen. + # There were apparently some problems using this Mixin with Python 2.5, but we + # are no longer using anything older than 2.6. +-class Server(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer): ++class Server(socketserver.ThreadingMixIn, http.server.HTTPServer): + + def Configure( + self, file_mapping, redirect_mapping, extensions_mapping, allow_404, +diff --git a/src/3rdparty/chromium/printing/cups_config_helper.py b/src/3rdparty/chromium/printing/cups_config_helper.py +index 373c244a1..50bcb4597 100755 +--- a/src/3rdparty/chromium/printing/cups_config_helper.py ++++ b/src/3rdparty/chromium/printing/cups_config_helper.py +@@ -17,7 +17,7 @@ requirements) when this is fixed: + is fixed. 
+ """ + +-from __future__ import print_function ++ + + import os + import subprocess +diff --git a/src/3rdparty/chromium/sandbox/policy/mac/package_sb_file.py b/src/3rdparty/chromium/sandbox/policy/mac/package_sb_file.py +index 934fd2261..928432df6 100755 +--- a/src/3rdparty/chromium/sandbox/policy/mac/package_sb_file.py ++++ b/src/3rdparty/chromium/sandbox/policy/mac/package_sb_file.py +@@ -28,7 +28,7 @@ def escape_for_c(line): + + def pack_file(argv): + if len(argv) != 2: +- print >> sys.stderr, 'usage: package_sb_file.py input_filename output_dir' ++ print('usage: package_sb_file.py input_filename output_dir', file=sys.stderr) + return 1 + input_filename = argv[0] + output_directory = argv[1] +@@ -56,7 +56,7 @@ def pack_file(argv): + outfile.write(cc_definition_end) + outfile.write(namespace_end) + except IOError: +- print >> sys.stderr, 'Failed to process %s' % input_filename ++ print('Failed to process %s' % input_filename, file=sys.stderr) + return 1 + return 0 + +diff --git a/src/3rdparty/chromium/services/device/public/cpp/usb/tools/usb_ids.py b/src/3rdparty/chromium/services/device/public/cpp/usb/tools/usb_ids.py +index 0576a001a..3c1659c0a 100644 +--- a/src/3rdparty/chromium/services/device/public/cpp/usb/tools/usb_ids.py ++++ b/src/3rdparty/chromium/services/device/public/cpp/usb/tools/usb_ids.py +@@ -64,7 +64,7 @@ def GenerateDeviceDefinitions(table): + return output + + def GenerateVendorDefinitions(table): +- output = "const size_t UsbIds::vendor_size_ = %d;\n" % len(table.keys()) ++ output = "const size_t UsbIds::vendor_size_ = %d;\n" % len(list(table.keys())) + output += "const UsbVendor UsbIds::vendors_[] = {\n" + + for vendor_id in sorted(table.keys()): +diff --git a/src/3rdparty/chromium/testing/chromoting/browser_tests_launcher.py b/src/3rdparty/chromium/testing/chromoting/browser_tests_launcher.py +index 566b3344c..c5c6464a3 100644 +--- a/src/3rdparty/chromium/testing/chromoting/browser_tests_launcher.py ++++ 
b/src/3rdparty/chromium/testing/chromoting/browser_tests_launcher.py +@@ -67,10 +67,10 @@ def LaunchBTCommand(args, command): + # record instances where a test passed despite a JID mismatch. + if jids_used and host_jid.rstrip() not in jids_used: + host_jid_mismatch = True +- print 'Host JID mismatch. JID in host log = %s.' % host_jid.rstrip() +- print 'Host JIDs used by test:' ++ print('Host JID mismatch. JID in host log = %s.' % host_jid.rstrip()) ++ print('Host JIDs used by test:') + for jid in jids_used: +- print jid ++ print(jid) + + if host_jid_mismatch: + # The JID for the remote-host did not match the JID that was used for this +@@ -83,7 +83,7 @@ def LaunchBTCommand(args, command): + time.sleep(30) + continue + elif jids_used: +- print 'JID used by test matched me2me host JID: %s' % host_jid ++ print('JID used by test matched me2me host JID: %s' % host_jid) + else: + # There wasn't a mismatch and no JIDs were returned. If no JIDs were + # returned, that means the test didn't use any JIDs, so there is nothing +@@ -102,9 +102,9 @@ def LaunchBTCommand(args, command): + # and, because sometimes that line gets logged even if the test + # eventually passes, we'll also look for "(TIMED OUT)", before retrying. + if BROWSER_NOT_STARTED_ERROR in results and TIME_OUT_INDICATOR in results: +- print 'Browser-instance not started (http://crbug/480025). Retrying.' ++ print('Browser-instance not started (http://crbug/480025). Retrying.') + else: +- print 'Test failed for unknown reason. Retrying.' ++ print('Test failed for unknown reason. 
Retrying.') + + retries += 1 + +@@ -156,9 +156,9 @@ if __name__ == '__main__': + try: + host_logs = main(command_line_args) + if TEST_FAILURE: +- print '++++++++++AT LEAST 1 TEST FAILED++++++++++' +- print FAILING_TESTS.rstrip('\n') +- print '++++++++++++++++++++++++++++++++++++++++++' ++ print('++++++++++AT LEAST 1 TEST FAILED++++++++++') ++ print(FAILING_TESTS.rstrip('\n')) ++ print('++++++++++++++++++++++++++++++++++++++++++') + raise Exception('At least one test failed.') + finally: + # Stop host and cleanup user-profile-dir. +diff --git a/src/3rdparty/chromium/testing/chromoting/chromoting_test_driver_launcher.py b/src/3rdparty/chromium/testing/chromoting/chromoting_test_driver_launcher.py +index 73373441f..578a93a64 100644 +--- a/src/3rdparty/chromium/testing/chromoting/chromoting_test_driver_launcher.py ++++ b/src/3rdparty/chromium/testing/chromoting/chromoting_test_driver_launcher.py +@@ -38,7 +38,7 @@ def LaunchCTDCommand(args, command): + + if not host_jid: + # Host-JID not found in log. Let's not attempt to run this test. +- print 'Host-JID not found in log %s.' % host_log_file_names[-1] ++ print('Host-JID not found in log %s.' % host_log_file_names[-1]) + return '[Command failed]: %s, %s' % (command, host_log_file_names) + + retries = 0 +@@ -123,9 +123,9 @@ if __name__ == '__main__': + try: + failing_tests, host_logs = main(command_line_args) + if failing_tests: +- print '++++++++++FAILED TESTS++++++++++' +- print failing_tests.rstrip('\n') +- print '++++++++++++++++++++++++++++++++' ++ print('++++++++++FAILED TESTS++++++++++') ++ print(failing_tests.rstrip('\n')) ++ print('++++++++++++++++++++++++++++++++') + raise Exception('At least one test failed.') + finally: + # Stop host and cleanup user-profile-dir. 
+diff --git a/src/3rdparty/chromium/testing/chromoting/chromoting_test_utilities.py b/src/3rdparty/chromium/testing/chromoting/chromoting_test_utilities.py +index 0b2505611..7531f0a3c 100644 +--- a/src/3rdparty/chromium/testing/chromoting/chromoting_test_utilities.py ++++ b/src/3rdparty/chromium/testing/chromoting/chromoting_test_utilities.py +@@ -47,13 +47,13 @@ def RunCommandInSubProcess(command): + + cmd_line = [command] + try: +- print 'Going to run:\n%s' % command ++ print('Going to run:\n%s' % command) + results = subprocess.check_output(cmd_line, stderr=subprocess.STDOUT, + shell=True) +- except subprocess.CalledProcessError, e: ++ except subprocess.CalledProcessError as e: + results = e.output + finally: +- print results ++ print(results) + return results + + +@@ -133,7 +133,7 @@ def RestartMe2MeHost(): + # Stop chromoting host. + RunCommandInSubProcess(CHROMOTING_HOST_PATH + ' --stop') + # Start chromoting host. +- print 'Starting chromoting host from %s' % CHROMOTING_HOST_PATH ++ print('Starting chromoting host from %s' % CHROMOTING_HOST_PATH) + results = RunCommandInSubProcess(CHROMOTING_HOST_PATH + ' --start') + + os.chdir(previous_directory) +@@ -148,7 +148,7 @@ def RestartMe2MeHost(): + if HOST_READY_INDICATOR not in results: + # Host start failed. Print out host-log. Don't run any tests. 
+ with open(log_file, 'r') as f: +- print f.read() ++ print(f.read()) + raise HostOperationFailedException('Host restart failed.') + + return log_file +@@ -194,9 +194,9 @@ def PrintRunningProcesses(): + processes = psutil.get_process_list() + processes = sorted(processes, key=lambda process: process.name) + +- print 'List of running processes:\n' ++ print('List of running processes:\n') + for process in processes: +- print process.name ++ print(process.name) + + + def PrintHostLogContents(host_log_files=None): +@@ -206,7 +206,7 @@ def PrintHostLogContents(host_log_files=None): + with open(log_file, 'r') as log: + host_log_contents += '\nHOST LOG %s\n CONTENTS:\n%s' % ( + log_file, log.read()) +- print host_log_contents ++ print(host_log_contents) + + + def TestCaseSetup(args): +diff --git a/src/3rdparty/chromium/testing/chromoting/download_test_files.py b/src/3rdparty/chromium/testing/chromoting/download_test_files.py +index 57fb9a19e..7802feee5 100644 +--- a/src/3rdparty/chromium/testing/chromoting/download_test_files.py ++++ b/src/3rdparty/chromium/testing/chromoting/download_test_files.py +@@ -49,8 +49,8 @@ def main(): + cp_cmd = ['gsutil.py', 'cp', line, output_file] + try: + subprocess.check_call(cp_cmd) +- except subprocess.CalledProcessError, e: +- print e.output ++ except subprocess.CalledProcessError as e: ++ print(e.output) + sys.exit(1) + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/testing/clusterfuzz/common/fuzzy_types.py b/src/3rdparty/chromium/testing/clusterfuzz/common/fuzzy_types.py +index 08bfea673..f81837c7c 100644 +--- a/src/3rdparty/chromium/testing/clusterfuzz/common/fuzzy_types.py ++++ b/src/3rdparty/chromium/testing/clusterfuzz/common/fuzzy_types.py +@@ -19,8 +19,8 @@ def FuzzyInt(n): + """Returns an integer derived from the input by one of several mutations.""" + int_sizes = [8, 16, 32, 64, 128] + mutations = [ +- lambda n: utils.UniformExpoInteger(0, sys.maxint.bit_length() + 1), +- lambda n: 
-utils.UniformExpoInteger(0, sys.maxint.bit_length()), ++ lambda n: utils.UniformExpoInteger(0, sys.maxsize.bit_length() + 1), ++ lambda n: -utils.UniformExpoInteger(0, sys.maxsize.bit_length()), + lambda n: 2 ** random.choice(int_sizes) - 1, + lambda n: 2 ** random.choice(int_sizes), + lambda n: 0, +@@ -55,9 +55,9 @@ def FuzzyString(s): + # If we're still here, apply a more generic mutation + mutations = [ + lambda s: "".join(random.choice(string.printable) for i in +- xrange(utils.UniformExpoInteger(0, 14))), +- lambda s: "".join(unichr(random.randint(0, sys.maxunicode)) for i in +- xrange(utils.UniformExpoInteger(0, 14))).encode("utf-8"), ++ range(utils.UniformExpoInteger(0, 14))), ++ lambda s: "".join(chr(random.randint(0, sys.maxunicode)) for i in ++ range(utils.UniformExpoInteger(0, 14))).encode("utf-8"), + lambda s: os.urandom(utils.UniformExpoInteger(0, 14)), + lambda s: s * utils.UniformExpoInteger(1, 5), + lambda s: s + "A" * utils.UniformExpoInteger(0, 14), +@@ -124,7 +124,7 @@ class FuzzySequence(object): + if amount is None: + amount = utils.RandomLowInteger(min(1, len(self)), len(self) - location) + if hasattr(value, "__call__"): +- new_elements = (value() for i in xrange(amount)) ++ new_elements = (value() for i in range(amount)) + else: + new_elements = itertools.repeat(value, amount) + self[location:location+amount] = new_elements +@@ -140,7 +140,7 @@ class FuzzySequence(object): + if amount is None: + amount = utils.UniformExpoInteger(0, max_exponent) + if hasattr(value, "__call__"): +- new_elements = (value() for i in xrange(amount)) ++ new_elements = (value() for i in range(amount)) + else: + new_elements = itertools.repeat(value, amount) + self[location:location] = new_elements +@@ -171,7 +171,7 @@ class FuzzyList(list, FuzzySequence): + ] + if count is None: + count = utils.RandomLowInteger(1, 5, beta=3.0) +- for _ in xrange(count): ++ for _ in range(count): + random.choice(mutations)() + + +@@ -185,7 +185,7 @@ class FuzzyBuffer(bytearray, 
FuzzySequence): + """Flip num_bits bits in the buffer at random.""" + if num_bits is None: + num_bits = utils.RandomLowInteger(min(1, len(self)), len(self) * 8) +- for bit in random.sample(xrange(len(self) * 8), num_bits): ++ for bit in random.sample(range(len(self) * 8), num_bits): + self[bit / 8] ^= 1 << (bit % 8) + + def RandomMutation(self, count=None): +@@ -203,5 +203,5 @@ class FuzzyBuffer(bytearray, FuzzySequence): + ] + if count is None: + count = utils.RandomLowInteger(1, 5, beta=3.0) +- for _ in xrange(count): ++ for _ in range(count): + utils.WeightedChoice(mutations)() +diff --git a/src/3rdparty/chromium/testing/clusterfuzz/common/utils.py b/src/3rdparty/chromium/testing/clusterfuzz/common/utils.py +index e499ad2c5..6e2c7a999 100644 +--- a/src/3rdparty/chromium/testing/clusterfuzz/common/utils.py ++++ b/src/3rdparty/chromium/testing/clusterfuzz/common/utils.py +@@ -6,6 +6,7 @@ import copy + import functools + import math + import random ++from functools import reduce + + + def RandomLowInteger(low, high, beta=31.0): +diff --git a/src/3rdparty/chromium/testing/libfuzzer/archive_corpus.py b/src/3rdparty/chromium/testing/libfuzzer/archive_corpus.py +index dddc18ffa..67a6d91c3 100755 +--- a/src/3rdparty/chromium/testing/libfuzzer/archive_corpus.py ++++ b/src/3rdparty/chromium/testing/libfuzzer/archive_corpus.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2016 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +@@ -9,7 +9,7 @@ + Invoked by GN from fuzzer_test.gni. 
+ """ + +-from __future__ import print_function ++ + import argparse + import os + import sys +diff --git a/src/3rdparty/chromium/testing/libfuzzer/dictionary_generator.py b/src/3rdparty/chromium/testing/libfuzzer/dictionary_generator.py +index b720e8e75..7d17e3e70 100755 +--- a/src/3rdparty/chromium/testing/libfuzzer/dictionary_generator.py ++++ b/src/3rdparty/chromium/testing/libfuzzer/dictionary_generator.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2016 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +@@ -11,7 +11,7 @@ Works better for text formats or protocols. For binary ones may be useless. + """ + + import argparse +-import HTMLParser ++import html.parser + import io + import logging + import os +@@ -29,7 +29,7 @@ MIN_STRING_LENGTH = 4 + + def DecodeHTML(html_data): + """HTML-decoding of the data.""" +- html_parser = HTMLParser.HTMLParser() ++ html_parser = html.parser.HTMLParser() + data = html_parser.unescape(html_data.decode('ascii', 'ignore')) + return data.encode('ascii', 'ignore') + +@@ -54,7 +54,7 @@ def ExtractWordsFromBinary(filepath, min_length=MIN_STRING_LENGTH): + for encoding in ENCODING_TYPES: + data = rodata.decode(encoding, 'ignore').encode('ascii', 'ignore') + raw_strings = strings_re.findall(data) +- for splitted_line in map(lambda line: line.split(), raw_strings): ++ for splitted_line in [line.split() for line in raw_strings]: + words += splitted_line + + return set(words) +@@ -85,7 +85,7 @@ def FindIndentedText(text): + previous_number_of_spaces = 0 + + # Go through every line and concatenate space-indented blocks into lines. +- for i in xrange(0, len(lines), 1): ++ for i in range(0, len(lines), 1): + if not lines[i]: + # Ignore empty lines. 
+ continue +diff --git a/src/3rdparty/chromium/testing/libfuzzer/gen_fuzzer_config.py b/src/3rdparty/chromium/testing/libfuzzer/gen_fuzzer_config.py +index a77007b12..7037e9797 100755 +--- a/src/3rdparty/chromium/testing/libfuzzer/gen_fuzzer_config.py ++++ b/src/3rdparty/chromium/testing/libfuzzer/gen_fuzzer_config.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright (c) 2015 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +@@ -8,7 +8,7 @@ + Invoked by GN from fuzzer_test.gni. + """ + +-import ConfigParser ++import configparser + import argparse + import os + import sys +@@ -54,7 +54,7 @@ def main(): + args.grammar_options): + return + +- config = ConfigParser.ConfigParser() ++ config = configparser.ConfigParser() + libfuzzer_options = [] + if args.dict: + libfuzzer_options.append(('dict', os.path.basename(args.dict))) +diff --git a/src/3rdparty/chromium/testing/libfuzzer/gen_fuzzer_owners.py b/src/3rdparty/chromium/testing/libfuzzer/gen_fuzzer_owners.py +index f3f6fcad0..1436e6173 100755 +--- a/src/3rdparty/chromium/testing/libfuzzer/gen_fuzzer_owners.py ++++ b/src/3rdparty/chromium/testing/libfuzzer/gen_fuzzer_owners.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python2.7 ++#!/usr/bin/env python3 + # + # Copyright 2018 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +diff --git a/src/3rdparty/chromium/testing/libfuzzer/zip_sources.py b/src/3rdparty/chromium/testing/libfuzzer/zip_sources.py +index 946a7de9f..2066ede6b 100755 +--- a/src/3rdparty/chromium/testing/libfuzzer/zip_sources.py ++++ b/src/3rdparty/chromium/testing/libfuzzer/zip_sources.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2016 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +@@ -9,7 +9,7 @@ + Invoked by libfuzzer buildbots. 
Executes dwarfdump to parse debug info. + """ + +-from __future__ import print_function ++ + + import argparse + import os +diff --git a/src/3rdparty/chromium/testing/merge_scripts/common_merge_script_tests.py b/src/3rdparty/chromium/testing/merge_scripts/common_merge_script_tests.py +index fd56905fb..c6067fbf5 100644 +--- a/src/3rdparty/chromium/testing/merge_scripts/common_merge_script_tests.py ++++ b/src/3rdparty/chromium/testing/merge_scripts/common_merge_script_tests.py +@@ -28,9 +28,9 @@ class CommandLineTest(unittest.TestCase): + summary_json = os.path.join(task_output_dir, 'summary.json') + with open(summary_json, 'w') as summary_file: + summary_contents = { +- u'shards': [ ++ 'shards': [ + { +- u'state': u'COMPLETED', ++ 'state': 'COMPLETED', + }, + ], + } +@@ -48,4 +48,4 @@ class CommandLineTest(unittest.TestCase): + '--output-json', output_json, + shard0_json, + ] +- self.assertEquals(0, self._module.main(raw_args)) ++ self.assertEqual(0, self._module.main(raw_args)) +diff --git a/src/3rdparty/chromium/testing/merge_scripts/noop_merge.py b/src/3rdparty/chromium/testing/merge_scripts/noop_merge.py +index f7a93cd31..4c607288a 100755 +--- a/src/3rdparty/chromium/testing/merge_scripts/noop_merge.py ++++ b/src/3rdparty/chromium/testing/merge_scripts/noop_merge.py +@@ -21,8 +21,8 @@ def noop_merge(output_json, jsons_to_merge): + jsons_to_merge: A list of paths to JSON files. 
+ """ + if len(jsons_to_merge) > 1: +- print >> sys.stderr, ( +- 'Multiple JSONs provided: %s' % ','.join(jsons_to_merge)) ++ print(( ++ 'Multiple JSONs provided: %s' % ','.join(jsons_to_merge)), file=sys.stderr) + return 1 + if jsons_to_merge: + shutil.copyfile(jsons_to_merge[0], output_json) +diff --git a/src/3rdparty/chromium/testing/merge_scripts/results_merger.py b/src/3rdparty/chromium/testing/merge_scripts/results_merger.py +index 3b23d828d..287459c79 100755 +--- a/src/3rdparty/chromium/testing/merge_scripts/results_merger.py ++++ b/src/3rdparty/chromium/testing/merge_scripts/results_merger.py +@@ -179,7 +179,7 @@ def _merge_json_test_result_format(shard_results_list): + if result_json: + raise MergeException( # pragma: no cover (covered by + # results_merger_unittest). +- 'Unmergable values %s' % result_json.keys()) ++ 'Unmergable values %s' % list(result_json.keys())) + + return merged_results + +@@ -202,7 +202,7 @@ def merge_tries(source, dest): + pending_nodes = [('', dest, source)] + while pending_nodes: + prefix, dest_node, curr_node = pending_nodes.pop() +- for k, v in curr_node.iteritems(): ++ for k, v in curr_node.items(): + if k in dest_node: + if not isinstance(v, dict): + raise MergeException( +@@ -234,7 +234,7 @@ def sum_dicts(source, dest): + + This is intended for use as a merge_func parameter to merge_value. 
+ """ +- for k, v in source.iteritems(): ++ for k, v in source.items(): + dest.setdefault(k, 0) + dest[k] += v + +@@ -274,7 +274,7 @@ def main(files): + for f in files[1:]: + sys.stderr.write('Merging %s\n' % f) + result = merge_test_results([result, json.load(open(f))]) +- print json.dumps(result) ++ print(json.dumps(result)) + return 0 + + +diff --git a/src/3rdparty/chromium/testing/merge_scripts/results_merger_test.py b/src/3rdparty/chromium/testing/merge_scripts/results_merger_test.py +index e01f7898c..0c1770d33 100755 +--- a/src/3rdparty/chromium/testing/merge_scripts/results_merger_test.py ++++ b/src/3rdparty/chromium/testing/merge_scripts/results_merger_test.py +@@ -162,13 +162,13 @@ class MergingTest(unittest.TestCase): # pragma: no cover + maxDiff = None # Show full diff if assertion fail + + def test_merge_tries(self): +- self.assertEquals( ++ self.assertEqual( + {'a': 'A', 'b': {'c': 'C'}}, + results_merger.merge_tries( + {'a': 'A', 'b': {}}, {'b': {'c': 'C'}})) + + def test_merge_tries_unmergable(self): +- with self.assertRaisesRegexp(results_merger.MergeException, "a:b"): ++ with self.assertRaisesRegex(results_merger.MergeException, "a:b"): + results_merger.merge_tries( + {'a': {'b': 'A'}}, {'a': {'b': 'C'}}) + +@@ -178,7 +178,7 @@ class MergingTest(unittest.TestCase): # pragma: no cover + merged_results = results_merger.merge_test_results( + [extend(GOOD_JSON_TEST_RESULT_0, metadata1), + extend(GOOD_JSON_TEST_RESULT_1, metadata2)]) +- self.assertEquals( ++ self.assertEqual( + merged_results['metadata']['tags'], ['foo', 'bat']) + + def test_merge_json_test_results_nop(self): +@@ -190,8 +190,8 @@ class MergingTest(unittest.TestCase): # pragma: no cover + for j in good_json_results: + # Clone so we can check the input dictionaries are not modified + a = copy.deepcopy(j) +- self.assertEquals(results_merger.merge_test_results([a]), j) +- self.assertEquals(a, j) ++ self.assertEqual(results_merger.merge_test_results([a]), j) ++ self.assertEqual(a, j) + + def 
test_merge_json_test_results_invalid_version(self): + with self.assertRaises(results_merger.MergeException): +@@ -242,7 +242,7 @@ class MergingTest(unittest.TestCase): # pragma: no cover + ]) + + def test_merge_json_test_results_multiple(self): +- self.assertEquals( ++ self.assertEqual( + results_merger.merge_test_results([ + GOOD_JSON_TEST_RESULT_0, + GOOD_JSON_TEST_RESULT_1, +@@ -251,7 +251,7 @@ class MergingTest(unittest.TestCase): # pragma: no cover + GOOD_JSON_TEST_RESULT_MERGED) + + def test_merge_json_test_results_optional_matches(self): +- self.assertEquals( ++ self.assertEqual( + results_merger.merge_test_results([ + extend(GOOD_JSON_TEST_RESULT_0, {'path_delimiter': '.'}), + extend(GOOD_JSON_TEST_RESULT_1, {'path_delimiter': '.'}), +@@ -268,7 +268,7 @@ class MergingTest(unittest.TestCase): # pragma: no cover + ]) + + def test_merge_json_test_results_optional_count(self): +- self.assertEquals( ++ self.assertEqual( + results_merger.merge_test_results([ + extend(GOOD_JSON_TEST_RESULT_0, {'fixable': 1}), + extend(GOOD_JSON_TEST_RESULT_1, {'fixable': 2}), +@@ -277,7 +277,7 @@ class MergingTest(unittest.TestCase): # pragma: no cover + extend(GOOD_JSON_TEST_RESULT_MERGED, {'fixable': 6})) + + def test_merge_nothing(self): +- self.assertEquals( ++ self.assertEqual( + results_merger.merge_test_results([]), + {}) + +diff --git a/src/3rdparty/chromium/testing/merge_scripts/standard_gtest_merge.py b/src/3rdparty/chromium/testing/merge_scripts/standard_gtest_merge.py +index 99adf7bc7..ab3875da3 100755 +--- a/src/3rdparty/chromium/testing/merge_scripts/standard_gtest_merge.py ++++ b/src/3rdparty/chromium/testing/merge_scripts/standard_gtest_merge.py +@@ -27,13 +27,13 @@ Please examine logs to figure out what happened. 
+ + + def emit_warning(title, log=None): +- print '@@@STEP_WARNINGS@@@' +- print title ++ print('@@@STEP_WARNINGS@@@') ++ print(title) + if log: + title = title.rstrip() + for line in log.splitlines(): +- print '@@@STEP_LOG_LINE@%s@%s@@@' % (title, line.rstrip()) +- print '@@@STEP_LOG_END@%s@@@' % title ++ print('@@@STEP_LOG_LINE@%s@%s@@@' % (title, line.rstrip())) ++ print('@@@STEP_LOG_END@%s@@@' % title) + + + def merge_shard_results(summary_json, jsons_to_merge): +@@ -72,16 +72,16 @@ def merge_shard_results(summary_json, jsons_to_merge): + # client/swarming.py, which means the state enum is saved in its string + # name form, not in the number form. + state = result.get('state') +- if state == u'BOT_DIED': ++ if state == 'BOT_DIED': + emit_warning('Shard #%d had a Swarming internal failure' % index) +- elif state == u'EXPIRED': ++ elif state == 'EXPIRED': + emit_warning('There wasn\'t enough capacity to run your test') +- elif state == u'TIMED_OUT': ++ elif state == 'TIMED_OUT': + emit_warning( + 'Test runtime exceeded allocated time', + 'Either it ran for too long (hard timeout) or it didn\'t produce ' + 'I/O for an extended period of time (I/O timeout)') +- elif state != u'COMPLETED': ++ elif state != 'COMPLETED': + emit_warning('Invalid Swarming task state: %s' % state) + + json_data, err_msg = load_shard_json(index, result.get('task_id'), +@@ -145,10 +145,10 @@ def load_shard_json(index, task_id, jsons_to_merge): + os.path.basename(os.path.dirname(j)) == task_id))] + + if not matching_json_files: +- print >> sys.stderr, 'shard %s test output missing' % index ++ print('shard %s test output missing' % index, file=sys.stderr) + return (None, 'shard %s test output was missing' % index) + elif len(matching_json_files) > 1: +- print >> sys.stderr, 'duplicate test output for shard %s' % index ++ print('duplicate test output for shard %s' % index, file=sys.stderr) + return (None, 'shard %s test output was duplicated' % index) + + path = matching_json_files[0] +@@ 
-156,15 +156,15 @@ def load_shard_json(index, task_id, jsons_to_merge): + try: + filesize = os.stat(path).st_size + if filesize > OUTPUT_JSON_SIZE_LIMIT: +- print >> sys.stderr, 'output.json is %d bytes. Max size is %d' % ( +- filesize, OUTPUT_JSON_SIZE_LIMIT) ++ print('output.json is %d bytes. Max size is %d' % ( ++ filesize, OUTPUT_JSON_SIZE_LIMIT), file=sys.stderr) + return (None, 'shard %s test output exceeded the size limit' % index) + + with open(path) as f: + return (json.load(f), None) + except (IOError, ValueError, OSError) as e: +- print >> sys.stderr, 'Missing or invalid gtest JSON file: %s' % path +- print >> sys.stderr, '%s: %s' % (type(e).__name__, e) ++ print('Missing or invalid gtest JSON file: %s' % path, file=sys.stderr) ++ print('%s: %s' % (type(e).__name__, e), file=sys.stderr) + + return (None, 'shard %s test output was missing or invalid' % index) + +@@ -172,7 +172,7 @@ def load_shard_json(index, task_id, jsons_to_merge): + def merge_list_of_dicts(left, right): + """Merges dicts left[0] with right[0], left[1] with right[1], etc.""" + output = [] +- for i in xrange(max(len(left), len(right))): ++ for i in range(max(len(left), len(right))): + left_dict = left[i] if i < len(left) else {} + right_dict = right[i] if i < len(right) else {} + merged_dict = left_dict.copy() +diff --git a/src/3rdparty/chromium/testing/merge_scripts/standard_gtest_merge_test.py b/src/3rdparty/chromium/testing/merge_scripts/standard_gtest_merge_test.py +index 586534a8d..79e123beb 100755 +--- a/src/3rdparty/chromium/testing/merge_scripts/standard_gtest_merge_test.py ++++ b/src/3rdparty/chromium/testing/merge_scripts/standard_gtest_merge_test.py +@@ -3,7 +3,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-import cStringIO ++import io + import json + import logging + import os +@@ -200,11 +200,11 @@ GOOD_GTEST_JSON_MERGED = { + }], + }], + 'swarming_summary': { +- u'shards': [ ++ 'shards': [ + { +- u'state': u'COMPLETED', +- u'outputs_ref': { +- u'view_url': u'blah', ++ 'state': 'COMPLETED', ++ 'outputs_ref': { ++ 'view_url': 'blah', + }, + } + ], +@@ -326,12 +326,12 @@ TIMED_OUT_GTEST_JSON_MERGED = { + }], + }], + 'swarming_summary': { +- u'shards': [ ++ 'shards': [ + { +- u'state': u'COMPLETED', ++ 'state': 'COMPLETED', + }, + { +- u'state': u'TIMED_OUT', ++ 'state': 'TIMED_OUT', + }, + ], + }, +@@ -381,7 +381,7 @@ class LoadShardJsonTest(_StandardGtestMergeTest): + + def test_double_digit_jsons(self): + jsons_to_merge = [] +- for i in xrange(15): ++ for i in range(15): + json_dir = os.path.join(self.temp_dir, str(i)) + json_path = os.path.join(json_dir, 'output.json') + if not os.path.exists(json_dir): +@@ -402,7 +402,7 @@ class LoadShardJsonTest(_StandardGtestMergeTest): + + def test_double_task_id_jsons(self): + jsons_to_merge = [] +- for i in xrange(15): ++ for i in range(15): + json_dir = os.path.join(self.temp_dir, 'deadbeef%d' % i) + json_path = os.path.join(json_dir, 'output.json') + if not os.path.exists(json_dir): +@@ -434,12 +434,12 @@ class MergeShardResultsTest(_StandardGtestMergeTest): + + def stage(self, summary, files): + self.summary = self._write_temp_file('summary.json', summary) +- for path, content in files.iteritems(): ++ for path, content in files.items(): + abs_path = self._write_temp_file(path, content) + self.test_files.append(abs_path) + + def call(self): +- stdout = cStringIO.StringIO() ++ stdout = io.StringIO() + with mock.patch('sys.stdout', stdout): + merged = standard_gtest_merge.merge_shard_results( + self.summary, self.test_files) +@@ -448,27 +448,27 @@ class MergeShardResultsTest(_StandardGtestMergeTest): + def assertUnicodeEquals(self, expectation, result): + def convert_to_unicode(key_or_value): + if isinstance(key_or_value, 
str): +- return unicode(key_or_value) ++ return str(key_or_value) + if isinstance(key_or_value, dict): + return {convert_to_unicode(k): convert_to_unicode(v) +- for k, v in key_or_value.items()} ++ for k, v in list(key_or_value.items())} + if isinstance(key_or_value, list): + return [convert_to_unicode(x) for x in key_or_value] + return key_or_value + + unicode_expectations = convert_to_unicode(expectation) + unicode_result = convert_to_unicode(result) +- self.assertEquals(unicode_expectations, unicode_result) ++ self.assertEqual(unicode_expectations, unicode_result) + + def test_ok(self): + # Two shards, both successfully finished. + self.stage({ +- u'shards': [ ++ 'shards': [ + { +- u'state': u'COMPLETED', ++ 'state': 'COMPLETED', + }, + { +- u'state': u'COMPLETED', ++ 'state': 'COMPLETED', + }, + ], + }, +@@ -480,9 +480,9 @@ class MergeShardResultsTest(_StandardGtestMergeTest): + merged['swarming_summary'] = { + 'shards': [ + { +- u'state': u'COMPLETED', +- u'outputs_ref': { +- u'view_url': u'blah', ++ 'state': 'COMPLETED', ++ 'outputs_ref': { ++ 'view_url': 'blah', + }, + } + ], +@@ -523,15 +523,15 @@ class MergeShardResultsTest(_StandardGtestMergeTest): + def test_unfinished_shards(self): + # Only one shard (#1) finished. Shard #0 did not. + self.stage({ +- u'shards': [ ++ 'shards': [ + None, + { +- u'state': u'COMPLETED', ++ 'state': 'COMPLETED', + }, + ], + }, + { +- u'1/output.json': GOOD_GTEST_JSON_1, ++ '1/output.json': GOOD_GTEST_JSON_1, + }) + merged, stdout = self.call() + merged.pop('swarming_summary') +@@ -545,17 +545,17 @@ class MergeShardResultsTest(_StandardGtestMergeTest): + def test_missing_output_json(self): + # Shard #0 output json is missing. 
+ self.stage({ +- u'shards': [ ++ 'shards': [ + { +- u'state': u'COMPLETED', ++ 'state': 'COMPLETED', + }, + { +- u'state': u'COMPLETED', ++ 'state': 'COMPLETED', + }, + ], + }, + { +- u'1/output.json': GOOD_GTEST_JSON_1, ++ '1/output.json': GOOD_GTEST_JSON_1, + }) + merged, stdout = self.call() + merged.pop('swarming_summary') +@@ -567,12 +567,12 @@ class MergeShardResultsTest(_StandardGtestMergeTest): + def test_large_output_json(self): + # a shard is too large. + self.stage({ +- u'shards': [ ++ 'shards': [ + { +- u'state': u'COMPLETED', ++ 'state': 'COMPLETED', + }, + { +- u'state': u'COMPLETED', ++ 'state': 'COMPLETED', + }, + ], + }, +diff --git a/src/3rdparty/chromium/testing/merge_scripts/standard_isolated_script_merge.py b/src/3rdparty/chromium/testing/merge_scripts/standard_isolated_script_merge.py +index cd4829a29..fc24aba72 100755 +--- a/src/3rdparty/chromium/testing/merge_scripts/standard_isolated_script_merge.py ++++ b/src/3rdparty/chromium/testing/merge_scripts/standard_isolated_script_merge.py +@@ -26,9 +26,9 @@ def StandardIsolatedScriptMerge(output_json, summary_json, jsons_to_merge): + with open(summary_json) as f: + summary = json.load(f) + except (IOError, ValueError): +- print >> sys.stderr, ( ++ print(( + 'summary.json is missing or can not be read', +- 'Something is seriously wrong with swarming client or the bot.') ++ 'Something is seriously wrong with swarming client or the bot.'), file=sys.stderr) + return 1 + + missing_shards = [] +@@ -81,10 +81,10 @@ def find_shard_output_path(index, task_id, jsons_to_merge): + os.path.basename(os.path.dirname(j)) == task_id))] + + if not matching_json_files: +- print >> sys.stderr, 'shard %s test output missing' % index ++ print('shard %s test output missing' % index, file=sys.stderr) + return None + elif len(matching_json_files) > 1: +- print >> sys.stderr, 'duplicate test output for shard %s' % index ++ print('duplicate test output for shard %s' % index, file=sys.stderr) + return None + + return 
matching_json_files[0] +diff --git a/src/3rdparty/chromium/testing/merge_scripts/standard_isolated_script_merge_test.py b/src/3rdparty/chromium/testing/merge_scripts/standard_isolated_script_merge_test.py +index 7cd143600..8ef09e36b 100755 +--- a/src/3rdparty/chromium/testing/merge_scripts/standard_isolated_script_merge_test.py ++++ b/src/3rdparty/chromium/testing/merge_scripts/standard_isolated_script_merge_test.py +@@ -21,12 +21,12 @@ import standard_isolated_script_merge + + + TWO_COMPLETED_SHARDS = { +- u'shards': [ ++ 'shards': [ + { +- u'state': u'COMPLETED', ++ 'state': 'COMPLETED', + }, + { +- u'state': u'COMPLETED', ++ 'state': 'COMPLETED', + }, + ], + } +@@ -55,7 +55,7 @@ class StandardIsolatedScriptMergeTest(unittest.TestCase): + + def _stage(self, summary, files): + self.summary = self._write_temp_file('summary.json', summary) +- for path, content in files.iteritems(): ++ for path, content in files.items(): + abs_path = self._write_temp_file(path, content) + self.test_files.append(abs_path) + +@@ -80,8 +80,8 @@ class OutputTest(StandardIsolatedScriptMergeTest): + + with open(output_json_file, 'r') as f: + results = json.load(f) +- self.assertEquals(results['successes'], ['fizz', 'baz', 'buzz', 'bar']) +- self.assertEquals(results['failures'], ['failing_test_one']) ++ self.assertEqual(results['successes'], ['fizz', 'baz', 'buzz', 'bar']) ++ self.assertEqual(results['failures'], ['failing_test_one']) + self.assertTrue(results['valid']) + + def test_missing_shard(self): +@@ -98,11 +98,11 @@ class OutputTest(StandardIsolatedScriptMergeTest): + + with open(output_json_file, 'r') as f: + results = json.load(f) +- self.assertEquals(results['successes'], ['fizz', 'baz']) +- self.assertEquals(results['failures'], []) ++ self.assertEqual(results['successes'], ['fizz', 'baz']) ++ self.assertEqual(results['failures'], []) + self.assertTrue(results['valid']) +- self.assertEquals(results['global_tags'], ['UNRELIABLE_RESULTS']) +- 
self.assertEquals(results['missing_shards'], [1]) ++ self.assertEqual(results['global_tags'], ['UNRELIABLE_RESULTS']) ++ self.assertEqual(results['missing_shards'], [1]) + + class InputParsingTest(StandardIsolatedScriptMergeTest): + def setUp(self): +@@ -141,8 +141,8 @@ class InputParsingTest(StandardIsolatedScriptMergeTest): + exit_code = standard_isolated_script_merge.StandardIsolatedScriptMerge( + output_json_file, self.summary, self.test_files) + +- self.assertEquals(0, exit_code) +- self.assertEquals( ++ self.assertEqual(0, exit_code) ++ self.assertEqual( + [ + [ + { +@@ -161,7 +161,7 @@ class InputParsingTest(StandardIsolatedScriptMergeTest): + + def test_no_jsons(self): + self._stage({ +- u'shards': [], ++ 'shards': [], + }, {}) + + json_files = [] +@@ -169,8 +169,8 @@ class InputParsingTest(StandardIsolatedScriptMergeTest): + exit_code = standard_isolated_script_merge.StandardIsolatedScriptMerge( + output_json_file, self.summary, json_files) + +- self.assertEquals(0, exit_code) +- self.assertEquals([[]], self.merge_test_results_args) ++ self.assertEqual(0, exit_code) ++ self.assertEqual([[]], self.merge_test_results_args) + + + class CommandLineTest(common_merge_script_tests.CommandLineTest): +diff --git a/src/3rdparty/chromium/testing/run_with_dummy_home.py b/src/3rdparty/chromium/testing/run_with_dummy_home.py +index 3d28e060f..a82b9d3ac 100755 +--- a/src/3rdparty/chromium/testing/run_with_dummy_home.py ++++ b/src/3rdparty/chromium/testing/run_with_dummy_home.py +@@ -44,7 +44,7 @@ def _set_up_dummy_home(original_home, dummy_home): + def main(): + try: + dummy_home = tempfile.mkdtemp() +- print 'Creating dummy home in %s' % dummy_home ++ print('Creating dummy home in %s' % dummy_home) + + original_home = os.environ['HOME'] + os.environ['HOME'] = dummy_home +diff --git a/src/3rdparty/chromium/testing/scripts/blink_python_tests.py b/src/3rdparty/chromium/testing/scripts/blink_python_tests.py +index c7f14e686..454444639 100755 +--- 
a/src/3rdparty/chromium/testing/scripts/blink_python_tests.py ++++ b/src/3rdparty/chromium/testing/scripts/blink_python_tests.py +@@ -29,7 +29,7 @@ def main_run(args): + json.dump({ + 'valid': bool(rc <= common.MAX_FAILURES_EXIT_STATUS and + ((rc == 0) or failures)), +- 'failures': failures.keys(), ++ 'failures': list(failures.keys()), + }, args.output) + + return rc +diff --git a/src/3rdparty/chromium/testing/scripts/check_static_initializers.py b/src/3rdparty/chromium/testing/scripts/check_static_initializers.py +index 76ab89628..54021e3c7 100755 +--- a/src/3rdparty/chromium/testing/scripts/check_static_initializers.py ++++ b/src/3rdparty/chromium/testing/scripts/check_static_initializers.py +@@ -121,8 +121,8 @@ def main_mac(src_dir): + if re.match('0x[0-9a-f]+', line) and not any( + f in line for f in _MAC_SI_FILE_ALLOWLIST): + ret = 1 +- print 'Found invalid static initializer: {}'.format(line) +- print stdout ++ print('Found invalid static initializer: {}'.format(line)) ++ print(stdout) + elif si_count > FALLBACK_EXPECTED_MAC_SI_COUNT: + print('Expected <= %d static initializers in %s, but found %d' % + (FALLBACK_EXPECTED_MAC_SI_COUNT, chromium_framework_executable, +@@ -134,14 +134,14 @@ def main_mac(src_dir): + if os.path.exists(framework_unstripped_name): + args.append(framework_unstripped_name) + else: +- print '# Warning: Falling back to potentially stripped output.' 
++ print('# Warning: Falling back to potentially stripped output.') + args.append(chromium_framework_executable) + + if os.path.exists(hermetic_xcode_path): + args.extend(['--xcode-path', hermetic_xcode_path]) + + stdout = run_process(args) +- print stdout ++ print(stdout) + return ret + + +@@ -178,8 +178,8 @@ def main_linux(src_dir, is_chromeos): + print('Error: file "%s" is not expected to have static initializers in' + ' binary "%s"') % (f, binary_name) + +- print '\n# Static initializers in %s:' % binary_name +- print stdout ++ print('\n# Static initializers in %s:' % binary_name) ++ print(stdout) + + return ret + +diff --git a/src/3rdparty/chromium/testing/scripts/common.py b/src/3rdparty/chromium/testing/scripts/common.py +index 0d0811a41..4a93a0f66 100644 +--- a/src/3rdparty/chromium/testing/scripts/common.py ++++ b/src/3rdparty/chromium/testing/scripts/common.py +@@ -74,9 +74,9 @@ def run_script(argv, funcs): + + + def run_command(argv, env=None, cwd=None): +- print 'Running %r in %r (env: %r)' % (argv, cwd, env) ++ print('Running %r in %r (env: %r)' % (argv, cwd, env)) + rc = test_env.run_command(argv, env=env, cwd=cwd) +- print 'Command %r returned exit code %d' % (argv, rc) ++ print('Command %r returned exit code %d' % (argv, rc)) + return rc + + +@@ -94,7 +94,7 @@ def parse_common_test_results(json_results, test_separator='/'): + def convert_trie_to_flat_paths(trie, prefix=None): + # Also see blinkpy.web_tests.layout_package.json_results_generator + result = {} +- for name, data in trie.iteritems(): ++ for name, data in trie.items(): + if prefix: + name = prefix + test_separator + name + if len(data) and not 'actual' in data and not 'expected' in data: +@@ -118,7 +118,7 @@ def parse_common_test_results(json_results, test_separator='/'): + passing_statuses = ('PASS', 'SLOW', 'NEEDSREBASELINE') + + for test, result in convert_trie_to_flat_paths( +- json_results['tests']).iteritems(): ++ json_results['tests']).items(): + key = 'unexpected_' if 
result.get('is_unexpected') else '' + data = result['actual'] + actual_results = data.split() +@@ -177,7 +177,7 @@ def get_gtest_summary_passes(output): + mapping = {} + + for cur_iteration_data in output.get('per_iteration_data', []): +- for test_fullname, results in cur_iteration_data.iteritems(): ++ for test_fullname, results in cur_iteration_data.items(): + # Results is a list with one entry per test try. Last one is the final + # result. + last_result = results[-1] +@@ -345,13 +345,13 @@ class BaseIsolatedScriptArgsAdapter(object): + valid = True + try: + env['CHROME_HEADLESS'] = '1' +- print 'Running command: %s\nwith env: %r' % ( +- ' '.join(cmd), env) ++ print('Running command: %s\nwith env: %r' % ( ++ ' '.join(cmd), env)) + if self.options.xvfb: + exit_code = xvfb.run_executable(cmd, env) + else: + exit_code = test_env.run_command(cmd, env=env) +- print 'Command returned exit code %d' % exit_code ++ print('Command returned exit code %d' % exit_code) + self.do_post_test_run_tasks() + return exit_code + except Exception: +diff --git a/src/3rdparty/chromium/testing/scripts/headless_python_unittests.py b/src/3rdparty/chromium/testing/scripts/headless_python_unittests.py +index 05a2f94bb..6889a531f 100755 +--- a/src/3rdparty/chromium/testing/scripts/headless_python_unittests.py ++++ b/src/3rdparty/chromium/testing/scripts/headless_python_unittests.py +@@ -36,7 +36,7 @@ def main_run(args): + json.dump({ + 'valid': bool(rc <= common.MAX_FAILURES_EXIT_STATUS and + ((rc == 0) or failures)), +- 'failures': failures.keys(), ++ 'failures': list(failures.keys()), + }, args.output) + + return rc +diff --git a/src/3rdparty/chromium/testing/scripts/host_info.py b/src/3rdparty/chromium/testing/scripts/host_info.py +index 6aa9b8a28..618f35b1e 100755 +--- a/src/3rdparty/chromium/testing/scripts/host_info.py ++++ b/src/3rdparty/chromium/testing/scripts/host_info.py +@@ -97,7 +97,7 @@ def get_device_info(args, failures): + 'build_types': unique_build_details(2), + } + +- for 
k, v in parsed_details.iteritems(): ++ for k, v in parsed_details.items(): + if len(v) == 1: + results[k] = v[0] + else: +diff --git a/src/3rdparty/chromium/testing/scripts/run_performance_tests.py b/src/3rdparty/chromium/testing/scripts/run_performance_tests.py +index 15696cbac..45c99c27e 100755 +--- a/src/3rdparty/chromium/testing/scripts/run_performance_tests.py ++++ b/src/3rdparty/chromium/testing/scripts/run_performance_tests.py +@@ -160,7 +160,7 @@ class OutputFilePaths(object): + + + def print_duration(step, start): +- print 'Duration of %s: %d seconds' % (step, time.time() - start) ++ print('Duration of %s: %d seconds' % (step, time.time() - start)) + + + def IsWindows(): +@@ -460,7 +460,7 @@ def execute_telemetry_benchmark( + except Exception: + print ('The following exception may have prevented the code from ' + 'outputing structured test results and perf results output:') +- print traceback.format_exc() ++ print(traceback.format_exc()) + finally: + # Add ignore_errors=True because otherwise rmtree may fail due to leaky + # processes of tests are still holding opened handles to files under +@@ -475,7 +475,7 @@ def execute_telemetry_benchmark( + # TODO(crbug.com/1019139): Make 111 be the exit code that means + # "no stories were run.". + if return_code in (111, -1, 255): +- print ('Exit code %s indicates that no stories were run, so we are marking ' ++ print('Exit code %s indicates that no stories were run, so we are marking ' + 'this as a success.' % return_code) + return 0 + if return_code: +@@ -620,7 +620,7 @@ def main(sys_args): + if 'benchmarks' in shard_configuration: + benchmarks_and_configs = shard_configuration['benchmarks'] + for (benchmark, story_selection_config +- ) in benchmarks_and_configs.iteritems(): ++ ) in benchmarks_and_configs.items(): + # Need to run the benchmark on both latest browser and reference + # build. 
+ output_paths = OutputFilePaths(isolated_out_dir, benchmark).SetUp() +@@ -650,7 +650,7 @@ def main(sys_args): + if 'executables' in shard_configuration: + names_and_configs = shard_configuration['executables'] + for (name, configuration +- ) in names_and_configs.iteritems(): ++ ) in names_and_configs.items(): + additional_flags = [] + if 'arguments' in configuration: + additional_flags = configuration['arguments'] +diff --git a/src/3rdparty/chromium/testing/scripts/run_rendering_benchmark_with_gated_performance.py b/src/3rdparty/chromium/testing/scripts/run_rendering_benchmark_with_gated_performance.py +index 0721aeb9f..7bcb6c97b 100755 +--- a/src/3rdparty/chromium/testing/scripts/run_rendering_benchmark_with_gated_performance.py ++++ b/src/3rdparty/chromium/testing/scripts/run_rendering_benchmark_with_gated_performance.py +@@ -15,7 +15,7 @@ The name and some functionalities of this script should be adjusted for + use with other benchmarks. + """ + +-from __future__ import print_function ++ + + import argparse + import csv +diff --git a/src/3rdparty/chromium/testing/scripts/test_buildbucket_api_gpu_use_cases.py b/src/3rdparty/chromium/testing/scripts/test_buildbucket_api_gpu_use_cases.py +index 648d8d40c..20e2e9a96 100755 +--- a/src/3rdparty/chromium/testing/scripts/test_buildbucket_api_gpu_use_cases.py ++++ b/src/3rdparty/chromium/testing/scripts/test_buildbucket_api_gpu_use_cases.py +@@ -95,11 +95,11 @@ def main(argv): + error_msg = test() + if error_msg is not None: + result = '%s: %s' % (test_name, error_msg) +- print 'FAIL: %s' % result ++ print('FAIL: %s' % result) + failures.append(result) + + if not failures: +- print 'PASS: test_buildbucket_api_gpu_use_cases ran successfully.' 
++ print('PASS: test_buildbucket_api_gpu_use_cases ran successfully.') + retval = 0 + + with open(args.isolated_script_test_output, 'w') as json_file: +diff --git a/src/3rdparty/chromium/testing/scripts/wpt_common.py b/src/3rdparty/chromium/testing/scripts/wpt_common.py +index 940a47c9a..2ac0f5f24 100644 +--- a/src/3rdparty/chromium/testing/scripts/wpt_common.py ++++ b/src/3rdparty/chromium/testing/scripts/wpt_common.py +@@ -143,7 +143,7 @@ class BaseWptScriptAdapter(common.BaseIsolatedScriptArgsAdapter): + # UnicodeDecodeErrors when writing to file. This can happen if + # the diff contains unicode characters but the file is written + # as ascii because of the default system-level encoding. +- html_diff_content = unicode(html_diff_content, 'utf-8') ++ html_diff_content = str(html_diff_content, 'utf-8') + html_diff_subpath = self._write_text_artifact( + test_failures.FILENAME_SUFFIX_HTML_DIFF, results_dir, + path_so_far, html_diff_content, extension=".html") +@@ -154,7 +154,7 @@ class BaseWptScriptAdapter(common.BaseIsolatedScriptArgsAdapter): + if screenshot_artifact: + screenshot_paths_dict = self._write_screenshot_artifact( + results_dir, path_so_far, screenshot_artifact) +- for screenshot_key, path in screenshot_paths_dict.items(): ++ for screenshot_key, path in list(screenshot_paths_dict.items()): + root_node["artifacts"][screenshot_key] = [path] + + crashlog_artifact = root_node["artifacts"].pop("wpt_crash_log", +diff --git a/src/3rdparty/chromium/testing/test_env.py b/src/3rdparty/chromium/testing/test_env.py +index 9d6092628..efcf8ed15 100755 +--- a/src/3rdparty/chromium/testing/test_env.py ++++ b/src/3rdparty/chromium/testing/test_env.py +@@ -5,7 +5,7 @@ + + """Sets environment variables needed to run a chromium unit test.""" + +-from __future__ import print_function ++ + import io + import os + import signal +diff --git a/src/3rdparty/chromium/testing/test_env_test_script.py b/src/3rdparty/chromium/testing/test_env_test_script.py +index 
4957ee656..680931ce8 100755 +--- a/src/3rdparty/chromium/testing/test_env_test_script.py ++++ b/src/3rdparty/chromium/testing/test_env_test_script.py +@@ -5,7 +5,7 @@ + + """Script for use in test_env unittests.""" + +-from __future__ import print_function ++ + import signal + import sys + import time +diff --git a/src/3rdparty/chromium/testing/trigger_scripts/base_test_triggerer.py b/src/3rdparty/chromium/testing/trigger_scripts/base_test_triggerer.py +index d16ab381a..9c2e4cc8d 100755 +--- a/src/3rdparty/chromium/testing/trigger_scripts/base_test_triggerer.py ++++ b/src/3rdparty/chromium/testing/trigger_scripts/base_test_triggerer.py +@@ -20,7 +20,7 @@ import os + import subprocess + import sys + import tempfile +-import urllib ++import urllib.request, urllib.parse, urllib.error + import logging + + SRC_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath( +@@ -31,14 +31,14 @@ SWARMING_PY = os.path.join(SRC_DIR, 'tools', 'swarming_client', 'swarming.py') + def strip_unicode(obj): + """Recursively re-encodes strings as utf-8 inside |obj|. Returns the result. 
+ """ +- if isinstance(obj, unicode): ++ if isinstance(obj, str): + return obj.encode('utf-8', 'replace') + if isinstance(obj, list): + return list(map(strip_unicode, obj)) + + if isinstance(obj, dict): + new_obj = type(obj)( +- (strip_unicode(k), strip_unicode(v)) for k, v in obj.iteritems() ) ++ (strip_unicode(k), strip_unicode(v)) for k, v in obj.items() ) + return new_obj + return obj + +@@ -78,7 +78,7 @@ class BaseTestTriggerer(object): + bot_args.append('GTEST_TOTAL_SHARDS') + bot_args.append(str(total_shards)) + if self._bot_configs: +- for key, val in sorted(self._bot_configs[bot_index].iteritems()): ++ for key, val in sorted(self._bot_configs[bot_index].items()): + bot_args.append('--dimension') + bot_args.append(key) + bot_args.append(val) +@@ -121,7 +121,7 @@ class BaseTestTriggerer(object): + try: + temp_file = self.make_temp_file(prefix='base_trigger_dimensions', + suffix='.json') +- encoded_args = urllib.urlencode(query_args) ++ encoded_args = urllib.parse.urlencode(query_args) + args =['query', + '-S', + server, +@@ -146,7 +146,7 @@ class BaseTestTriggerer(object): + # Query Swarming to figure out which bots are available. + for config in self._bot_configs: + values = [] +- for key, value in sorted(config.iteritems()): ++ for key, value in sorted(config.items()): + values.append(('dimensions', '%s:%s' % (key, value))) + # Ignore dead and quarantined bots. 
+ values.append(('is_dead', 'FALSE')) +@@ -169,7 +169,7 @@ class BaseTestTriggerer(object): + 'Total bots: %d' % (self._total_bots)) + + def remove_swarming_dimension(self, args, dimension): +- for i in xrange(len(args)): ++ for i in range(len(args)): + if args[i] == '--dimension' and args[i+1] == dimension: + return args[:i] + args[i+3:] + return args +@@ -216,7 +216,7 @@ class BaseTestTriggerer(object): + def indices_to_trigger(self, args): + """Returns the indices of the swarming shards that should be triggered.""" + if args.shard_index is None: +- return range(args.shards) ++ return list(range(args.shards)) + else: + return [args.shard_index] + +@@ -244,7 +244,7 @@ class BaseTestTriggerer(object): + # dimensions on the command line. + filtered_remaining_args = copy.deepcopy(remaining) + for config in self._bot_configs: +- for k in config.iterkeys(): ++ for k in config.keys(): + filtered_remaining_args = self.remove_swarming_dimension( + filtered_remaining_args, k) + +@@ -276,7 +276,7 @@ class BaseTestTriggerer(object): + # However, reset the "tasks" entry to an empty dictionary, + # which will be handled specially. + merged_json['tasks'] = {} +- for k, v in result_json['tasks'].items(): ++ for k, v in list(result_json['tasks'].items()): + v['shard_index'] = shard_index + merged_json['tasks'][k + ':%d:%d' % (shard_index, args.shards)] = v + finally: +diff --git a/src/3rdparty/chromium/testing/trigger_scripts/perf_device_trigger.py b/src/3rdparty/chromium/testing/trigger_scripts/perf_device_trigger.py +index 748dcf9db..66c62b5bf 100755 +--- a/src/3rdparty/chromium/testing/trigger_scripts/perf_device_trigger.py ++++ b/src/3rdparty/chromium/testing/trigger_scripts/perf_device_trigger.py +@@ -44,7 +44,7 @@ This script is normally called from the swarming recipe module in tools/build. 
+ + """ + +-from __future__ import print_function ++ + + import argparse + import copy +@@ -53,7 +53,7 @@ import os + import subprocess + import sys + import tempfile +-import urllib ++import urllib.request, urllib.parse, urllib.error + import logging + + import base_test_triggerer +@@ -113,7 +113,7 @@ class PerfDeviceTriggerer(base_test_triggerer.BaseTestTriggerer): + self._bot_configs = [] + # For each eligible bot, append the dimension + # to the eligible bot_configs +- for _, bot in self._eligible_bots_by_ids.iteritems(): ++ for _, bot in self._eligible_bots_by_ids.items(): + self._bot_configs.append(bot.as_json_config()) + + def select_config_indices(self, args, verbose): +@@ -154,12 +154,12 @@ class PerfDeviceTriggerer(base_test_triggerer.BaseTestTriggerer): + existing_shard_bot_to_shard_map = copy.deepcopy(shard_to_bot_assignment_map) + # Now create sets of remaining healthy and bad bots + unallocated_healthy_bots = { +- b for b in unallocated_bots_by_ids.values() if b.is_alive()} ++ b for b in list(unallocated_bots_by_ids.values()) if b.is_alive()} + unallocated_bad_bots = { +- b for b in unallocated_bots_by_ids.values() if not b.is_alive()} ++ b for b in list(unallocated_bots_by_ids.values()) if not b.is_alive()} + + # Try assigning healthy bots for new shards first. 
+- for shard_index, bot in sorted(shard_to_bot_assignment_map.iteritems()): ++ for shard_index, bot in sorted(shard_to_bot_assignment_map.items()): + if not bot and unallocated_healthy_bots: + shard_to_bot_assignment_map[shard_index] = \ + unallocated_healthy_bots.pop() +@@ -169,7 +169,7 @@ class PerfDeviceTriggerer(base_test_triggerer.BaseTestTriggerer): + shard_to_bot_assignment_map[shard_index] = unallocated_bad_bots.pop() + + # Handle the rest of shards that were assigned dead bots: +- for shard_index, bot in sorted(shard_to_bot_assignment_map.iteritems()): ++ for shard_index, bot in sorted(shard_to_bot_assignment_map.items()): + if not bot.is_alive() and unallocated_healthy_bots: + dead_bot = bot + healthy_bot = unallocated_healthy_bots.pop() +@@ -195,7 +195,7 @@ class PerfDeviceTriggerer(base_test_triggerer.BaseTestTriggerer): + def _print_device_affinity_info( + self, new_map, existing_map, health_map, num_shards): + print() +- for shard_index in xrange(num_shards): ++ for shard_index in range(num_shards): + existing = existing_map.get(shard_index, None) + new = new_map.get(shard_index, None) + existing_id = '' +@@ -209,7 +209,7 @@ class PerfDeviceTriggerer(base_test_triggerer.BaseTestTriggerer): + + healthy_bots = [] + dead_bots = [] +- for _, b in health_map.iteritems(): ++ for _, b in health_map.items(): + if b.is_alive(): + healthy_bots.append(b.id()) + else: +@@ -230,7 +230,7 @@ class PerfDeviceTriggerer(base_test_triggerer.BaseTestTriggerer): + of the bots. 
+ """ + values = [] +- for key, value in sorted(dimensions.iteritems()): ++ for key, value in sorted(dimensions.items()): + values.append(('dimensions', '%s:%s' % (key, value))) + + query_result = self.query_swarming( +@@ -257,7 +257,7 @@ class PerfDeviceTriggerer(base_test_triggerer.BaseTestTriggerer): + # Example: swarming.py query -S server-url.com --limit 1 \\ + # 'tasks/list?tags=os:Windows&tags=pool:chrome.tests.perf&tags=shard:12' + values = [ +- ('tags', '%s:%s' % (k, v)) for k, v in self._dimensions.iteritems() ++ ('tags', '%s:%s' % (k, v)) for k, v in self._dimensions.items() + ] + # Append the shard as a tag + values.append(('tags', '%s:%s' % ('shard', str(shard_index)))) +@@ -286,13 +286,13 @@ class PerfDeviceTriggerer(base_test_triggerer.BaseTestTriggerer): + + def _get_swarming_dimensions(self, args): + dimensions = {} +- for i in xrange(len(args) - 2): ++ for i in range(len(args) - 2): + if args[i] == '--dimension': + dimensions[args[i+1]] = args[i+2] + return dimensions + + def _get_swarming_server(self, args): +- for i in xrange(len(args)): ++ for i in range(len(args)): + if '--swarming' in args[i]: + server = args[i+1] + slashes_index = server.index('//') + 2 +@@ -300,7 +300,7 @@ class PerfDeviceTriggerer(base_test_triggerer.BaseTestTriggerer): + return server[slashes_index:] + + def _get_service_account(self, args): +- for i in xrange(len(args) - 1): ++ for i in range(len(args) - 1): + if '--auth-service-account-json' in args[i]: + return args[i+1] + +diff --git a/src/3rdparty/chromium/testing/trigger_scripts/perf_device_trigger_unittest.py b/src/3rdparty/chromium/testing/trigger_scripts/perf_device_trigger_unittest.py +index c2dbd5339..0808dedc5 100755 +--- a/src/3rdparty/chromium/testing/trigger_scripts/perf_device_trigger_unittest.py ++++ b/src/3rdparty/chromium/testing/trigger_scripts/perf_device_trigger_unittest.py +@@ -98,12 +98,12 @@ class UnitTest(unittest.TestCase): + # the last build that ran the shard that corresponds to that + # 
index. If that shard hasn't been run before the entry + # should be an empty string. +- for i in xrange(num_shards): ++ for i in range(num_shards): + bot_id = previous_task_assignment_map.get(i) + files['base_trigger_dimensions%d.json' % file_index] = ( + self.generate_last_task_to_shard_query_response(i, bot_id)) + file_index = file_index + 1 +- for i in xrange(num_shards): ++ for i in range(num_shards): + task = { + 'base_task_name': 'webgl_conformance_tests', + 'request': { +@@ -154,7 +154,7 @@ class UnitTest(unittest.TestCase): + + def list_contains_sublist(self, main_list, sub_list): + return any(sub_list == main_list[offset:offset + len(sub_list)] +- for offset in xrange(len(main_list) - (len(sub_list) - 1))) ++ for offset in range(len(main_list) - (len(sub_list) - 1))) + + def assert_query_swarming_args(self, triggerer, num_shards): + # Assert the calls to query swarming send the right args +@@ -186,13 +186,13 @@ class UnitTest(unittest.TestCase): + dead_bots=['build1', 'build2']) + expected_task_assignment = self.get_triggered_shard_to_bot( + triggerer, num_shards=3) +- self.assertEquals(len(set(expected_task_assignment.values())), 3) ++ self.assertEqual(len(set(expected_task_assignment.values())), 3) + + # All three bots were healthy so we should expect the task assignment to + # stay the same +- self.assertEquals(expected_task_assignment.get(0), 'build3') +- self.assertEquals(expected_task_assignment.get(1), 'build4') +- self.assertEquals(expected_task_assignment.get(2), 'build5') ++ self.assertEqual(expected_task_assignment.get(0), 'build3') ++ self.assertEqual(expected_task_assignment.get(1), 'build4') ++ self.assertEqual(expected_task_assignment.get(2), 'build5') + + def test_no_bot_returned(self): + with self.assertRaises(ValueError) as context: +@@ -212,13 +212,13 @@ class UnitTest(unittest.TestCase): + dead_bots=['build1', 'build2']) + expected_task_assignment = self.get_triggered_shard_to_bot( + triggerer, num_shards=3) +- 
self.assertEquals(len(set(expected_task_assignment.values())), 3) ++ self.assertEqual(len(set(expected_task_assignment.values())), 3) + + # The first two should be assigned to one of the unassigned healthy bots + new_healthy_bots = ['build4', 'build5'] + self.assertIn(expected_task_assignment.get(0), new_healthy_bots) + self.assertIn(expected_task_assignment.get(1), new_healthy_bots) +- self.assertEquals(expected_task_assignment.get(2), 'build3') ++ self.assertEqual(expected_task_assignment.get(2), 'build3') + + def test_not_enough_healthy_bots(self): + triggerer = self.setup_and_trigger( +@@ -228,17 +228,17 @@ class UnitTest(unittest.TestCase): + dead_bots=['build1', 'build2']) + expected_task_assignment = self.get_triggered_shard_to_bot( + triggerer, num_shards=5) +- self.assertEquals(len(set(expected_task_assignment.values())), 5) ++ self.assertEqual(len(set(expected_task_assignment.values())), 5) + + # We have 5 shards and 5 bots that ran them, but two + # are now dead and there aren't any other healthy bots + # to swap out to. Make sure they still assign to the + # same shards. 
+- self.assertEquals(expected_task_assignment.get(0), 'build1') +- self.assertEquals(expected_task_assignment.get(1), 'build2') +- self.assertEquals(expected_task_assignment.get(2), 'build3') +- self.assertEquals(expected_task_assignment.get(3), 'build4') +- self.assertEquals(expected_task_assignment.get(4), 'build5') ++ self.assertEqual(expected_task_assignment.get(0), 'build1') ++ self.assertEqual(expected_task_assignment.get(1), 'build2') ++ self.assertEqual(expected_task_assignment.get(2), 'build3') ++ self.assertEqual(expected_task_assignment.get(3), 'build4') ++ self.assertEqual(expected_task_assignment.get(4), 'build5') + + def test_not_enough_healthy_bots_shard_not_seen(self): + triggerer = self.setup_and_trigger( +@@ -248,18 +248,18 @@ class UnitTest(unittest.TestCase): + dead_bots=['build1', 'build2']) + expected_task_assignment = self.get_triggered_shard_to_bot( + triggerer, num_shards=5) +- self.assertEquals(len(set(expected_task_assignment.values())), 5) ++ self.assertEqual(len(set(expected_task_assignment.values())), 5) + + # Not enough healthy bots so make sure shard 0 is still assigned to its + # same dead bot. +- self.assertEquals(expected_task_assignment.get(0), 'build1') ++ self.assertEqual(expected_task_assignment.get(0), 'build1') + # Shard 1 had not been triggered yet, but there weren't enough + # healthy bots. Make sure it got assigned to the other dead bot. +- self.assertEquals(expected_task_assignment.get(1), 'build2') ++ self.assertEqual(expected_task_assignment.get(1), 'build2') + # The rest of the assignments should stay the same. 
+- self.assertEquals(expected_task_assignment.get(2), 'build3') +- self.assertEquals(expected_task_assignment.get(3), 'build4') +- self.assertEquals(expected_task_assignment.get(4), 'build5') ++ self.assertEqual(expected_task_assignment.get(2), 'build3') ++ self.assertEqual(expected_task_assignment.get(3), 'build4') ++ self.assertEqual(expected_task_assignment.get(4), 'build5') + + def test_shards_not_triggered_yet(self): + # First time this configuration has been seen. Choose three +@@ -270,7 +270,7 @@ class UnitTest(unittest.TestCase): + dead_bots=['build1', 'build2']) + expected_task_assignment = self.get_triggered_shard_to_bot( + triggerer, num_shards=3) +- self.assertEquals(len(set(expected_task_assignment.values())), 3) ++ self.assertEqual(len(set(expected_task_assignment.values())), 3) + new_healthy_bots = ['build3', 'build4', 'build5'] + self.assertIn(expected_task_assignment.get(0), new_healthy_bots) + self.assertIn(expected_task_assignment.get(1), new_healthy_bots) +@@ -288,7 +288,7 @@ class UnitTest(unittest.TestCase): + # Test that the new assignment will add a new bot to avoid + # assign 'build3' to both shard 0 & shard 1 as before. + # It also replaces the dead 'build6' bot. 
+- self.assertEquals(set(expected_task_assignment.values()), ++ self.assertEqual(set(expected_task_assignment.values()), + {'build3', 'build4', 'build5', 'build7'}) + + +diff --git a/src/3rdparty/chromium/testing/variations/PRESUBMIT.py b/src/3rdparty/chromium/testing/variations/PRESUBMIT.py +index 96c52d894..405dcb6e9 100644 +--- a/src/3rdparty/chromium/testing/variations/PRESUBMIT.py ++++ b/src/3rdparty/chromium/testing/variations/PRESUBMIT.py +@@ -77,7 +77,7 @@ def PrettyPrint(contents): + ('experiments', [])]) + for experiment in experiment_config['experiments']: + ordered_experiment = OrderedDict() +- for index in xrange(0, 10): ++ for index in range(0, 10): + comment_key = '//' + str(index) + if comment_key in experiment: + ordered_experiment[comment_key] = experiment[comment_key] +@@ -86,7 +86,7 @@ def PrettyPrint(contents): + ordered_experiment['forcing_flag'] = experiment['forcing_flag'] + if 'params' in experiment: + ordered_experiment['params'] = OrderedDict( +- sorted(experiment['params'].items(), key=lambda t: t[0])) ++ sorted(list(experiment['params'].items()), key=lambda t: t[0])) + if 'enable_features' in experiment: + ordered_experiment['enable_features'] = \ + sorted(experiment['enable_features']) +@@ -127,7 +127,7 @@ def ValidateData(input_api, json_data, file_path, message_type): + + if not isinstance(json_data, dict): + return _CreateMessage('Expecting dict') +- for (study, experiment_configs) in json_data.iteritems(): ++ for (study, experiment_configs) in json_data.items(): + warnings = _ValidateEntry(study, experiment_configs, _CreateMessage) + if warnings: + return warnings +@@ -137,7 +137,7 @@ def ValidateData(input_api, json_data, file_path, message_type): + + def _ValidateEntry(study, experiment_configs, create_message_fn): + """Validates one entry of the field trial configuration.""" +- if not isinstance(study, unicode): ++ if not isinstance(study, str): + return create_message_fn('Expecting keys to be string, got %s', type(study)) + if 
not isinstance(experiment_configs, list): + return create_message_fn('Expecting list for study %s', study) +@@ -185,7 +185,7 @@ def _ValidateExperimentConfig(experiment_config, create_message_fn): + def _ValidateExperimentGroup(experiment_group, create_message_fn): + """Validates one group of one config in a configuration entry.""" + name = experiment_group.get('name', '') +- if not name or not isinstance(name, unicode): ++ if not name or not isinstance(name, str): + return create_message_fn('Missing valid name for experiment') + + # Add context to other messages. +@@ -197,10 +197,10 @@ def _ValidateExperimentGroup(experiment_group, create_message_fn): + params = experiment_group['params'] + if not isinstance(params, dict): + return _CreateGroupMessage('Expected dict for params') +- for (key, value) in params.iteritems(): +- if not isinstance(key, unicode) or not isinstance(value, unicode): ++ for (key, value) in params.items(): ++ if not isinstance(key, str) or not isinstance(value, str): + return _CreateGroupMessage('Invalid param (%s: %s)', key, value) +- for key in experiment_group.keys(): ++ for key in list(experiment_group.keys()): + if key not in VALID_EXPERIMENT_KEYS: + return _CreateGroupMessage('Key[%s] is not a valid key', key) + return [] +diff --git a/src/3rdparty/chromium/testing/xvfb.py b/src/3rdparty/chromium/testing/xvfb.py +index d868072aa..bf74bfc13 100755 +--- a/src/3rdparty/chromium/testing/xvfb.py ++++ b/src/3rdparty/chromium/testing/xvfb.py +@@ -40,13 +40,12 @@ def kill(proc, name, timeout_in_seconds=10): + + thread.join(timeout_in_seconds) + if thread.is_alive(): +- print >> sys.stderr, '%s running after SIGTERM, trying SIGKILL.' % name ++ print('%s running after SIGTERM, trying SIGKILL.' % name, file=sys.stderr) + proc.kill() + + thread.join(timeout_in_seconds) + if thread.is_alive(): +- print >> sys.stderr, \ +- '%s running after SIGTERM and SIGKILL; good luck!' % name ++ print('%s running after SIGTERM and SIGKILL; good luck!' 
% name, file=sys.stderr) + + + def launch_dbus(env): +@@ -79,7 +78,7 @@ def launch_dbus(env): + env[m.group(1)] = m.group(2) + return int(env['DBUS_SESSION_BUS_PID']) + except (subprocess.CalledProcessError, OSError, KeyError, ValueError) as e: +- print 'Exception while running dbus_launch: %s' % e ++ print('Exception while running dbus_launch: %s' % e) + + + # TODO(crbug.com/949194): Encourage setting flags to False. +@@ -124,7 +123,7 @@ def run_executable( + use_weston = False + if '--use-weston' in cmd: + if use_xvfb: +- print >> sys.stderr, 'Unable to use Weston with xvfb.' ++ print('Unable to use Weston with xvfb.', file=sys.stderr) + return 1 + use_weston = True + cmd.remove('--use-weston') +@@ -217,10 +216,10 @@ def _run_with_xvfb(cmd, env, stdoutfile, use_openbox, use_xcompmgr): + + return test_env.run_executable(cmd, env, stdoutfile) + except OSError as e: +- print >> sys.stderr, 'Failed to start Xvfb or Openbox: %s' % str(e) ++ print('Failed to start Xvfb or Openbox: %s' % str(e), file=sys.stderr) + return 1 + except _XvfbProcessError as e: +- print >> sys.stderr, 'Xvfb fail: %s' % str(e) ++ print('Xvfb fail: %s' % str(e), file=sys.stderr) + return 1 + finally: + kill(openbox_proc, 'openbox') +@@ -278,10 +277,10 @@ def _run_with_weston(cmd, env, stdoutfile): + env['WAYLAND_DISPLAY'] = weston_proc_display + return test_env.run_executable(cmd, env, stdoutfile) + except OSError as e: +- print >> sys.stderr, 'Failed to start Weston: %s' % str(e) ++ print('Failed to start Weston: %s' % str(e), file=sys.stderr) + return 1 + except _WestonProcessError as e: +- print >> sys.stderr, 'Weston fail: %s' % str(e) ++ print('Weston fail: %s' % str(e), file=sys.stderr) + return 1 + finally: + kill(weston_proc, 'weston') +@@ -379,22 +378,22 @@ def _set_xdg_runtime_dir(env): + if not runtime_dir: + runtime_dir = '/tmp/xdg-tmp-dir/' + if not os.path.exists(runtime_dir): +- os.makedirs(runtime_dir, 0700) ++ os.makedirs(runtime_dir, 0o700) + env['XDG_RUNTIME_DIR'] = 
runtime_dir + + + def main(): + usage = 'Usage: xvfb.py [command [--no-xvfb or --use-weston] args...]' + if len(sys.argv) < 2: +- print >> sys.stderr, usage ++ print(usage, file=sys.stderr) + return 2 + + # If the user still thinks the first argument is the execution directory then + # print a friendly error message and quit. + if os.path.isdir(sys.argv[1]): +- print >> sys.stderr, ( +- 'Invalid command: \"%s\" is a directory' % sys.argv[1]) +- print >> sys.stderr, usage ++ print(( ++ 'Invalid command: \"%s\" is a directory' % sys.argv[1]), file=sys.stderr) ++ print(usage, file=sys.stderr) + return 3 + + return run_executable(sys.argv[1:], os.environ.copy()) +diff --git a/src/3rdparty/chromium/testing/xvfb_test_script.py b/src/3rdparty/chromium/testing/xvfb_test_script.py +index e1dcdae4a..f391dd14c 100755 +--- a/src/3rdparty/chromium/testing/xvfb_test_script.py ++++ b/src/3rdparty/chromium/testing/xvfb_test_script.py +@@ -16,7 +16,7 @@ import time + + + def print_signal(sig, *_): +- print 'Signal :{}'.format(sig) ++ print('Signal :{}'.format(sig)) + + + if __name__ == '__main__': +@@ -24,10 +24,10 @@ if __name__ == '__main__': + signal.signal(signal.SIGINT, print_signal) + + # test if inside xvfb flag is set. +- print 'Inside_xvfb :{}'.format( +- os.environ.get('_CHROMIUM_INSIDE_XVFB', 'None')) ++ print('Inside_xvfb :{}'.format( ++ os.environ.get('_CHROMIUM_INSIDE_XVFB', 'None'))) + # test the subprocess display number. +- print 'Display :{}'.format(os.environ.get('DISPLAY', 'None')) ++ print('Display :{}'.format(os.environ.get('DISPLAY', 'None'))) + + if len(sys.argv) > 1 and sys.argv[1] == '--sleep': + time.sleep(2) # gives process time to receive signal. 
+diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/__init__.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/__init__.py +index 1b8655313..2015a80aa 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/__init__.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/__init__.py +@@ -30,8 +30,8 @@ Copyright 2004 Manfred Stienstra (the original version) + License: BSD (see LICENSE for details). + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from .__version__ import version, version_info # noqa + import codecs + import sys +@@ -126,7 +126,7 @@ class Markdown(object): + DeprecationWarning) + + # Loop through kwargs and assign defaults +- for option, default in self.option_defaults.items(): ++ for option, default in list(self.option_defaults.items()): + setattr(self, option, kwargs.get(option, default)) + + self.safeMode = kwargs.get('safe_mode', False) +@@ -364,14 +364,14 @@ class Markdown(object): + + # Split into lines and run the line preprocessors. + self.lines = source.split("\n") +- for prep in self.preprocessors.values(): ++ for prep in list(self.preprocessors.values()): + self.lines = prep.run(self.lines) + + # Parse the high-level elements. + root = self.parser.parseDocument(self.lines).getroot() + + # Run the tree-processors +- for treeprocessor in self.treeprocessors.values(): ++ for treeprocessor in list(self.treeprocessors.values()): + newRoot = treeprocessor.run(root) + if newRoot is not None: + root = newRoot +@@ -394,7 +394,7 @@ class Markdown(object): + 'tags. 
Document=%r' % output.strip()) + + # Run the text post-processors +- for pp in self.postprocessors.values(): ++ for pp in list(self.postprocessors.values()): + output = pp.run(output) + + return output.strip() +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/blockparser.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/blockparser.py +index 32d3254cd..661333446 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/blockparser.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/blockparser.py +@@ -1,5 +1,5 @@ +-from __future__ import unicode_literals +-from __future__ import absolute_import ++ ++ + from . import util + from . import odict + +@@ -93,7 +93,7 @@ class BlockParser: + + """ + while blocks: +- for processor in self.blockprocessors.values(): ++ for processor in list(self.blockprocessors.values()): + if processor.test(parent, blocks[0]): + if processor.run(parent, blocks) is not False: + # run returns True or None +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/blockprocessors.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/blockprocessors.py +index 29db022ce..10dc33295 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/blockprocessors.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/blockprocessors.py +@@ -11,9 +11,9 @@ different type of block. Extensions may add/replace/remove BlockProcessors + as they need to alter how markdown blocks are parsed. + """ + +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import unicode_literals ++ ++ ++ + import logging + import re + from . 
import util +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/__init__.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/__init__.py +index 6e7a08a1e..5bd195fac 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/__init__.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/__init__.py +@@ -3,7 +3,7 @@ Extensions + ----------------------------------------------------------------------------- + """ + +-from __future__ import unicode_literals ++ + from ..util import parseBoolValue + import warnings + +@@ -36,7 +36,7 @@ class Extension(object): + 'Python-Markdown version 2.6 for more info.', + DeprecationWarning) + # check for configs kwarg for backward compat. +- if 'configs' in kwargs.keys(): ++ if 'configs' in list(kwargs.keys()): + if kwargs['configs'] is not None: + self.setConfigs(kwargs.pop('configs', {})) + warnings.warn('Extension classes accepting a dict on the single ' +@@ -59,11 +59,11 @@ class Extension(object): + + def getConfigs(self): + """ Return all configs settings as a dict. """ +- return dict([(key, self.getConfig(key)) for key in self.config.keys()]) ++ return dict([(key, self.getConfig(key)) for key in list(self.config.keys())]) + + def getConfigInfo(self): + """ Return all config descriptions as a list of tuples. """ +- return [(key, self.config[key][1]) for key in self.config.keys()] ++ return [(key, self.config[key][1]) for key in list(self.config.keys())] + + def setConfig(self, key, value): + """ Set a config setting for `key` with the given `value`. """ +@@ -77,7 +77,7 @@ class Extension(object): + """ Set multiple config settings given a dict or list of tuples. 
""" + if hasattr(items, 'items'): + # it's a dict +- items = items.items() ++ items = list(items.items()) + for key, value in items: + self.setConfig(key, value) + +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/abbr.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/abbr.py +index 353d126f6..6a5113d2b 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/abbr.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/abbr.py +@@ -16,8 +16,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + ''' + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import Extension + from ..preprocessors import Preprocessor + from ..inlinepatterns import Pattern +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/admonition.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/admonition.py +index 76e0fb588..fe8dcbf30 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/admonition.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/admonition.py +@@ -17,8 +17,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . 
import Extension + from ..blockprocessors import BlockProcessor + from ..util import etree +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/attr_list.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/attr_list.py +index 683bdf831..27f607bc8 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/attr_list.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/attr_list.py +@@ -17,8 +17,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import Extension + from ..treeprocessors import Treeprocessor + from ..util import isBlockLevel +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/codehilite.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/codehilite.py +index 0657c3768..91bdfac52 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/codehilite.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/codehilite.py +@@ -15,8 +15,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . 
import Extension + from ..treeprocessors import Treeprocessor + +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/def_list.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/def_list.py +index 77cca6eb8..9a1f7241a 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/def_list.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/def_list.py +@@ -15,8 +15,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import Extension + from ..blockprocessors import BlockProcessor, ListIndentProcessor + from ..util import etree +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/extra.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/extra.py +index de5db03cd..6f12868e6 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/extra.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/extra.py +@@ -29,8 +29,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import Extension + from ..blockprocessors import BlockProcessor + from .. 
import util +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/fenced_code.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/fenced_code.py +index 4af8891a8..95e552c1c 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/fenced_code.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/fenced_code.py +@@ -15,8 +15,8 @@ All changes Copyright 2008-2014 The Python Markdown Project + License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import Extension + from ..preprocessors import Preprocessor + from .codehilite import CodeHilite, CodeHiliteExtension, parse_hl_lines +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/footnotes.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/footnotes.py +index d8caae27c..486c3c3c3 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/footnotes.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/footnotes.py +@@ -13,8 +13,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . 
import Extension + from ..preprocessors import Preprocessor + from ..inlinepatterns import Pattern +@@ -137,7 +137,7 @@ class FootnoteExtension(Extension): + etree.SubElement(div, "hr") + ol = etree.SubElement(div, "ol") + +- for id in self.footnotes.keys(): ++ for id in list(self.footnotes.keys()): + li = etree.SubElement(ol, "li") + li.set("id", self.makeFootnoteId(id)) + self.parser.parseChunk(li, self.footnotes[id]) +@@ -265,7 +265,7 @@ class FootnotePattern(Pattern): + + def handleMatch(self, m): + id = m.group(2) +- if id in self.footnotes.footnotes.keys(): ++ if id in list(self.footnotes.footnotes.keys()): + sup = etree.Element("sup") + a = etree.SubElement(sup, "a") + sup.set('id', self.footnotes.makeFootnoteRefId(id)) +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/headerid.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/headerid.py +index 2cb20b97a..90ee15c8b 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/headerid.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/headerid.py +@@ -15,8 +15,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . 
import Extension + from ..treeprocessors import Treeprocessor + from ..util import parseBoolValue +@@ -82,7 +82,7 @@ class HeaderIdExtension(Extension): + self.processor = HeaderIdTreeprocessor() + self.processor.md = md + self.processor.config = self.getConfigs() +- if 'attr_list' in md.treeprocessors.keys(): ++ if 'attr_list' in list(md.treeprocessors.keys()): + # insert after attr_list treeprocessor + md.treeprocessors.add('headerid', self.processor, '>attr_list') + else: +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/meta.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/meta.py +index 711235ef4..6192ad3cb 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/meta.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/meta.py +@@ -15,8 +15,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import Extension + from ..preprocessors import Preprocessor + import re +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/nl2br.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/nl2br.py +index 8acd60c2e..6527425b5 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/nl2br.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/nl2br.py +@@ -16,8 +16,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . 
import Extension + from ..inlinepatterns import SubstituteTagPattern + +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/sane_lists.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/sane_lists.py +index 213c8a6fb..e14d0c68b 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/sane_lists.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/sane_lists.py +@@ -15,8 +15,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import Extension + from ..blockprocessors import OListProcessor, UListProcessor + import re +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/smart_strong.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/smart_strong.py +index 58570bb55..381c0aed5 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/smart_strong.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/smart_strong.py +@@ -15,8 +15,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + ''' + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import Extension + from ..inlinepatterns import SimpleTagPattern + +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/smarty.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/smarty.py +index 46e54c1ec..251879d68 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/smarty.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/smarty.py +@@ -81,7 +81,7 @@ smartypants.py license: + ''' + + +-from __future__ import unicode_literals ++ + from . 
import Extension + from ..inlinepatterns import HtmlPattern + from ..odict import OrderedDict +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/tables.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/tables.py +index 368321d46..9e80adc12 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/tables.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/tables.py +@@ -15,8 +15,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import Extension + from ..blockprocessors import BlockProcessor + from ..util import etree +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/toc.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/toc.py +index b3cf898f4..cb4a08ae5 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/toc.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/toc.py +@@ -13,8 +13,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import Extension + from ..treeprocessors import Treeprocessor + from ..util import etree, parseBoolValue, AMP_SUBSTITUTE, HTML_PLACEHOLDER_RE, string_type +@@ -257,7 +257,7 @@ class TocTreeprocessor(Treeprocessor): + + # serialize and attach to markdown instance. 
+ toc = self.markdown.serializer(div) +- for pp in self.markdown.postprocessors.values(): ++ for pp in list(self.markdown.postprocessors.values()): + toc = pp.run(toc) + self.markdown.toc = toc + +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/wikilinks.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/wikilinks.py +index 94e1b6794..9105562e4 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/wikilinks.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/extensions/wikilinks.py +@@ -15,8 +15,8 @@ License: [BSD](http://www.opensource.org/licenses/bsd-license.php) + + ''' + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import Extension + from ..inlinepatterns import Pattern + from ..util import etree +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/inlinepatterns.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/inlinepatterns.py +index 95d358d71..d4e84ba4c 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/inlinepatterns.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/inlinepatterns.py +@@ -41,19 +41,19 @@ So, we apply the expressions in the following order: + * finally we apply strong and emphasis + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import util + from . 
import odict + import re + try: # pragma: no cover + from urllib.parse import urlparse, urlunparse + except ImportError: # pragma: no cover +- from urlparse import urlparse, urlunparse ++ from urllib.parse import urlparse, urlunparse + try: # pragma: no cover + from html import entities + except ImportError: # pragma: no cover +- import htmlentitydefs as entities ++ import html.entities as entities + + + def build_inlinepatterns(md_instance, **kwargs): +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/odict.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/odict.py +index 584ad7c17..e484aa107 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/odict.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/odict.py +@@ -1,5 +1,5 @@ +-from __future__ import unicode_literals +-from __future__ import absolute_import ++ ++ + from . import util + from copy import deepcopy + +@@ -33,7 +33,7 @@ class OrderedDict(dict): + + def __deepcopy__(self, memo): + return self.__class__([(key, deepcopy(value, memo)) +- for key, value in self.items()]) ++ for key, value in list(self.items())]) + + def __copy__(self): + # The Python's default copy implementation will alter the state +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/postprocessors.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/postprocessors.py +index 2d4dcb589..aac1b04a8 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/postprocessors.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/postprocessors.py +@@ -8,8 +8,8 @@ processing. + + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import util + from . 
import odict + import re +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/preprocessors.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/preprocessors.py +index 7fd38d331..6010eb0c0 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/preprocessors.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/preprocessors.py +@@ -6,8 +6,8 @@ Preprocessors work on source text before we start doing anything too + complicated. + """ + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . import util + from . import odict + import re +@@ -166,7 +166,7 @@ class HtmlBlockPreprocessor(Preprocessor): + left_tag, left_index, ''.join(items[i:])) + right_listindex = \ + self._stringindex_to_listindex(data_index, items[i:]) + i +- if 'markdown' in attrs.keys(): ++ if 'markdown' in list(attrs.keys()): + items[i] = items[i][left_index:] # remove opening tag + placeholder = self.markdown.htmlStash.store_tag( + left_tag, attrs, i + 1, right_listindex + 1) +@@ -230,7 +230,7 @@ class HtmlBlockPreprocessor(Preprocessor): + + if block.rstrip().endswith(">") \ + and self._equal_tags(left_tag, right_tag): +- if self.markdown_in_raw and 'markdown' in attrs.keys(): ++ if self.markdown_in_raw and 'markdown' in list(attrs.keys()): + block = block[left_index:-len(right_tag) - 2] + new_blocks.append(self.markdown.htmlStash. 
+ store_tag(left_tag, attrs, 0, 2)) +@@ -268,7 +268,7 @@ class HtmlBlockPreprocessor(Preprocessor): + text.insert(0, block[data_index:]) + + in_tag = False +- if self.markdown_in_raw and 'markdown' in attrs.keys(): ++ if self.markdown_in_raw and 'markdown' in list(attrs.keys()): + items[0] = items[0][left_index:] + items[-1] = items[-1][:-len(right_tag) - 2] + if items[len(items) - 1]: # not a newline/empty string +@@ -290,7 +290,7 @@ class HtmlBlockPreprocessor(Preprocessor): + items = [] + + if items: +- if self.markdown_in_raw and 'markdown' in attrs.keys(): ++ if self.markdown_in_raw and 'markdown' in list(attrs.keys()): + items[0] = items[0][left_index:] + items[-1] = items[-1][:-len(right_tag) - 2] + if items[len(items) - 1]: # not a newline/empty string +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/serializers.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/serializers.py +index 1e8d9dd28..64d528e41 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/serializers.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/serializers.py +@@ -37,8 +37,8 @@ + # -------------------------------------------------------------------- + + +-from __future__ import absolute_import +-from __future__ import unicode_literals ++ ++ + from . 
import util + ElementTree = util.etree.ElementTree + QName = util.etree.QName +@@ -153,7 +153,7 @@ def _serialize_html(write, elem, qnames, namespaces, format): + _serialize_html(write, e, qnames, None, format) + else: + write("<" + tag) +- items = elem.items() ++ items = list(elem.items()) + if items or namespaces: + items = sorted(items) # lexical order + for k, v in items: +@@ -169,7 +169,7 @@ def _serialize_html(write, elem, qnames, namespaces, format): + else: + write(" %s=\"%s\"" % (qnames[k], v)) + if namespaces: +- items = namespaces.items() ++ items = list(namespaces.items()) + items.sort(key=lambda x: x[1]) # sort on prefix + for v, k in items: + if k: +@@ -261,7 +261,7 @@ def _namespaces(elem, default_namespace=None): + add_qname(tag) + elif tag is not None and tag is not Comment and tag is not PI: + _raise_serialization_error(tag) +- for key, value in elem.items(): ++ for key, value in list(elem.items()): + if isinstance(key, QName): + key = key.text + if key not in qnames: +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/treeprocessors.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/treeprocessors.py +index d06f19288..590738281 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/treeprocessors.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/treeprocessors.py +@@ -1,5 +1,5 @@ +-from __future__ import unicode_literals +-from __future__ import absolute_import ++ ++ + from . import util + from . import odict + from . 
import inlinepatterns +diff --git a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/util.py b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/util.py +index d3d48f099..f2d68cb33 100644 +--- a/src/3rdparty/chromium/third_party/Python-Markdown/markdown/util.py ++++ b/src/3rdparty/chromium/third_party/Python-Markdown/markdown/util.py +@@ -1,5 +1,5 @@ + # -*- coding: utf-8 -*- +-from __future__ import unicode_literals ++ + import re + import sys + +@@ -15,9 +15,9 @@ if PY3: # pragma: no cover + text_type = str + int2str = chr + else: # pragma: no cover +- string_type = basestring # noqa +- text_type = unicode # noqa +- int2str = unichr # noqa ++ string_type = str # noqa ++ text_type = str # noqa ++ int2str = chr # noqa + + + """ +diff --git a/src/3rdparty/chromium/third_party/SPIRV-Tools/src/kokoro/windows-msvc-2015-release-bazel/build.bat b/src/3rdparty/chromium/third_party/SPIRV-Tools/src/kokoro/windows-msvc-2015-release-bazel/build.bat +index ddb4f540d..4e5d1f4bf 100644 +--- a/src/3rdparty/chromium/third_party/SPIRV-Tools/src/kokoro/windows-msvc-2015-release-bazel/build.bat ++++ b/src/3rdparty/chromium/third_party/SPIRV-Tools/src/kokoro/windows-msvc-2015-release-bazel/build.bat +@@ -37,7 +37,7 @@ call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x64 + set BAZEL_VS=C:\Program Files (x86)\Microsoft Visual Studio 14.0 + set BAZEL_VC=C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC + set BAZEL_SH=c:\tools\msys64\usr\bin\bash.exe +-set BAZEL_PYTHON=c:\tools\python2\python.exe ++set BAZEL_PYTHON=c:\tools\python3\python.exe + + :: ######################################### + :: Start building. 
+diff --git a/src/3rdparty/chromium/third_party/SPIRV-Tools/src/utils/generate_grammar_tables.py b/src/3rdparty/chromium/third_party/SPIRV-Tools/src/utils/generate_grammar_tables.py +index 2a6773369..830328bf4 100755 +--- a/src/3rdparty/chromium/third_party/SPIRV-Tools/src/utils/generate_grammar_tables.py ++++ b/src/3rdparty/chromium/third_party/SPIRV-Tools/src/utils/generate_grammar_tables.py +@@ -447,7 +447,7 @@ def generate_enum_operand_kind_entry(entry, extension_map): + exts = [] + params = entry.get('parameters', []) + params = [p.get('kind') for p in params] +- params = zip(params, [''] * len(params)) ++ params = list(zip(params, [''] * len(params))) + version = entry.get('version', None) + max_version = entry.get('lastVersion', None) + +@@ -492,7 +492,7 @@ def generate_enum_operand_kind(enum, synthetic_exts_list): + for ext in exts: + if ext not in extension_map[value]: + extension_map[value].append(ext) +- synthetic_exts_list.extend(extension_map.values()) ++ synthetic_exts_list.extend(list(extension_map.values())) + + name = '{}_{}Entries'.format(PYGEN_VARIABLE_PREFIX, kind) + entries = [' {}'.format(generate_enum_operand_kind_entry(e, extension_map)) +@@ -529,14 +529,14 @@ def generate_operand_kind_table(enums): + three_optional_enums = [e for e in enums if e[0] in three_optional_enums] + enums.extend(three_optional_enums) + +- enum_kinds, enum_names, enum_entries = zip(*enums) ++ enum_kinds, enum_names, enum_entries = list(zip(*enums)) + # Mark the last three as optional ones. + enum_quantifiers = [''] * (len(enums) - 3) + ['?'] * 3 + # And we don't want redefinition of them. 
+ enum_entries = enum_entries[:-3] + enum_kinds = [convert_operand_kind(e) + for e in zip(enum_kinds, enum_quantifiers)] +- table_entries = zip(enum_kinds, enum_names, enum_names) ++ table_entries = list(zip(enum_kinds, enum_names, enum_names)) + table_entries = [' {{{}, ARRAY_SIZE({}), {}}}'.format(*e) + for e in table_entries] + +diff --git a/src/3rdparty/chromium/third_party/SPIRV-Tools/src/utils/update_build_version.py b/src/3rdparty/chromium/third_party/SPIRV-Tools/src/utils/update_build_version.py +index 321de74bd..8184191a2 100755 +--- a/src/3rdparty/chromium/third_party/SPIRV-Tools/src/utils/update_build_version.py ++++ b/src/3rdparty/chromium/third_party/SPIRV-Tools/src/utils/update_build_version.py +@@ -101,11 +101,11 @@ def describe(directory): + Runs 'git describe', or alternately 'git rev-parse HEAD', in directory. If + successful, returns the output; otherwise returns 'unknown hash, '.""" + try: +- # decode() is needed here for Python3 compatibility. In Python2, ++ # decode() is needed here for Python3 compatibility. In Python3, + # str and bytes are the same type, but not in Python3. + # Popen.communicate() returns a bytes instance, which needs to be + # decoded into text data first in Python3. And this decode() won't +- # hurt Python2. ++ # hurt Python3. 
+ return command_output(['git', 'describe'], directory).rstrip().decode() + except: + try: +diff --git a/src/3rdparty/chromium/third_party/abseil-cpp/absl/abseil.podspec.gen.py b/src/3rdparty/chromium/third_party/abseil-cpp/absl/abseil.podspec.gen.py +index 6aefb794d..6e155e754 100755 +--- a/src/3rdparty/chromium/third_party/abseil-cpp/absl/abseil.podspec.gen.py ++++ b/src/3rdparty/chromium/third_party/abseil-cpp/absl/abseil.podspec.gen.py +@@ -199,7 +199,7 @@ def write_indented_list(f, leading, values): + + def generate(args): + """Generates a podspec file from all BUILD files under absl directory.""" +- rules = filter(relevant_rule, collect_rules("absl")) ++ rules = list(filter(relevant_rule, collect_rules("absl"))) + with open(args.output, "wt") as f: + write_podspec(f, rules, vars(args)) + +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/apply_clang_format_on_all_sources.py b/src/3rdparty/chromium/third_party/angle/scripts/apply_clang_format_on_all_sources.py +index 779ef91e8..8cc06f113 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/apply_clang_format_on_all_sources.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/apply_clang_format_on_all_sources.py +@@ -9,7 +9,7 @@ + # example usage: + # ./scripts/apply_clang_format_on_all_sources.py src + +-from __future__ import print_function ++ + + import os + import sys +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/bmp_to_nv12.py b/src/3rdparty/chromium/third_party/angle/scripts/bmp_to_nv12.py +index 10491658b..d1c2f6f2c 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/bmp_to_nv12.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/bmp_to_nv12.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2016 The ANGLE Project Authors. All rights reserved. 
+ # Use of this source code is governed by a BSD-style license that can be +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/bootstrap.py b/src/3rdparty/chromium/third_party/angle/scripts/bootstrap.py +index c505ffaff..5683d1fc8 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/bootstrap.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/bootstrap.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + + # Copyright 2015 Google Inc. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +@@ -18,11 +18,11 @@ def main(): + try: + rc = subprocess.call(gclient_cmd, shell=True) + except OSError: +- print 'could not run "%s" via shell' % gclient_cmd ++ print('could not run "%s" via shell' % gclient_cmd) + sys.exit(1) + + if rc: +- print 'failed command: "%s"' % gclient_cmd ++ print('failed command: "%s"' % gclient_cmd) + sys.exit(1) + + with open('.gclient') as gclient_file: +@@ -35,7 +35,7 @@ def main(): + with open('.gclient', 'w') as gclient_file: + gclient_file.write(content) + +- print 'created .gclient' ++ print('created .gclient') + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/export_targets.py b/src/3rdparty/chromium/third_party/angle/scripts/export_targets.py +index bfea05db7..fe509f255 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/export_targets.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/export_targets.py +@@ -2,8 +2,8 @@ + assert __name__ == '__main__' + + ''' +-To update ANGLE in Gecko, use Windows with git-bash, and setup depot_tools, python2, and +-python3. Because depot_tools expects `python` to be `python2` (shame!), python2 must come ++To update ANGLE in Gecko, use Windows with git-bash, and setup depot_tools, python3, and ++python3. Because depot_tools expects `python` to be `python3` (shame!), python3 must come + before python3 in your path. 
+ + Upstream: https://chromium.googlesource.com/angle/angle +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/file_exists.py b/src/3rdparty/chromium/third_party/angle/scripts/file_exists.py +index 976bf546c..a3529174d 100644 +--- a/src/3rdparty/chromium/third_party/angle/scripts/file_exists.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/file_exists.py +@@ -1,11 +1,11 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # Copyright 2019 The ANGLE Project Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + # + # Simple helper for use in 'gn' files to check if a file exists. + +-from __future__ import print_function ++ + + import os, shutil, sys + +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/gen_gl_enum_utils.py b/src/3rdparty/chromium/third_party/angle/scripts/gen_gl_enum_utils.py +index 0d614f577..6ce05c57c 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/gen_gl_enum_utils.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/gen_gl_enum_utils.py +@@ -118,9 +118,9 @@ exclude_gl_enum_groups = {'SpecialNumbers'} + + def dump_value_to_string_mapping(gl_enum_in_groups, exporting_enums): + exporting_groups = list() +- for group_name, inner_mapping in gl_enum_in_groups.iteritems(): ++ for group_name, inner_mapping in gl_enum_in_groups.items(): + string_value_pairs = list( +- filter(lambda x: x[0] in exporting_enums, inner_mapping.iteritems())) ++ [x for x in iter(inner_mapping.items()) if x[0] in exporting_enums]) + if not string_value_pairs: + continue + +@@ -211,7 +211,7 @@ def main(header_output_path, source_output_path): + script_name=os.path.basename(sys.argv[0]), + data_source_name="gl.xml and gl_angle_ext.xml", + year=date.today().year, +- gl_enum_groups=',\n'.join(sorted(gl_enum_in_groups.iterkeys()))) ++ gl_enum_groups=',\n'.join(sorted(gl_enum_in_groups.keys()))) + + header_output_path = 
registry_xml.script_relative(header_output_path) + with open(header_output_path, 'w') as f: +@@ -249,9 +249,9 @@ if __name__ == '__main__': + + if len(sys.argv) > 1: + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + sys.exit( + main( +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/gen_proc_table.py b/src/3rdparty/chromium/third_party/angle/scripts/gen_proc_table.py +index 43e821de8..ef8584d00 100644 +--- a/src/3rdparty/chromium/third_party/angle/scripts/gen_proc_table.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/gen_proc_table.py +@@ -94,9 +94,9 @@ def main(): + inputs = [source for source in registry_xml.xml_inputs] + outputs = [out_file_name_gles, out_file_name_gl] + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +@@ -116,7 +116,7 @@ def main(): + + # Also don't add GLES extension commands to libGL proc table + extension_commands = [] +- for extension_name, ext_cmd_names in sorted(glesxml.ext_data.iteritems()): ++ for extension_name, ext_cmd_names in sorted(glesxml.ext_data.items()): + extension_commands.extend(glesxml.ext_data[extension_name]) + for name in extension_commands: + name_no_suffix = name +@@ -155,7 +155,7 @@ def main(): + all_functions[function] = function + + proc_data = [(' {"%s", P(%s)}' % (func, angle_func)) +- for func, angle_func in sorted(all_functions.iteritems())] ++ for func, angle_func in sorted(all_functions.items())] + + with open(out_file_name_gles, 'w') as out_file: + output_cpp = template_cpp.format( +@@ -204,7 +204,7 @@ def main(): + all_functions[function] = function + + proc_data = [(' {"%s", P(%s)}' % (func, angle_func)) +- for func, angle_func in sorted(all_functions.iteritems())] ++ for func, 
angle_func in sorted(all_functions.items())] + + with open(out_file_name_gl, 'w') as out_file: + output_cpp = template_cpp.format( +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/generate_android_bp.py b/src/3rdparty/chromium/third_party/angle/scripts/generate_android_bp.py +index f7853c175..a00a5cb1d 100644 +--- a/src/3rdparty/chromium/third_party/angle/scripts/generate_android_bp.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/generate_android_bp.py +@@ -42,7 +42,7 @@ def has_child_values(value): + if isinstance(value, list): + return len(value) > 0 + if isinstance(value, dict): +- for (item, item_value) in value.items(): ++ for (item, item_value) in list(value.items()): + if has_child_values(item_value): + return True + return False +@@ -68,7 +68,7 @@ def write_blueprint_key_value(output, name, value, indent=1): + if not value: + return + output.append(tabs(indent) + '%s: {' % name) +- for (item, item_value) in value.items(): ++ for (item, item_value) in list(value.items()): + write_blueprint_key_value(output, item, item_value, indent + 1) + output.append(tabs(indent) + '},') + return +@@ -80,7 +80,7 @@ def write_blueprint_key_value(output, name, value, indent=1): + + def write_blueprint(output, target_type, values): + output.append('%s {' % target_type) +- for (key, value) in values.items(): ++ for (key, value) in list(values.items()): + write_blueprint_key_value(output, key, value) + output.append('}') + +@@ -288,20 +288,20 @@ def merge_bps(bps_for_abis): + if key == 'defaults': + # arch-specific defaults are not supported + break +- value_in_all_abis = value_in_all_abis and (key in bps_for_abis[abi2].keys( +- )) and (value in bps_for_abis[abi2][key]) ++ value_in_all_abis = value_in_all_abis and (key in list(bps_for_abis[abi2].keys( ++ ))) and (value in bps_for_abis[abi2][key]) + if value_in_all_abis: +- if key in common_bp.keys(): ++ if key in list(common_bp.keys()): + common_bp[key].append(value) + else: + common_bp[key] = [value] + 
else: +- if 'arch' not in common_bp.keys(): ++ if 'arch' not in list(common_bp.keys()): + # Make sure there is an 'arch' entry to hold ABI-specific values + common_bp['arch'] = {} + for abi3 in abi_targets: + common_bp['arch'][abi3] = {} +- if key in common_bp['arch'][abi].keys(): ++ if key in list(common_bp['arch'][abi].keys()): + common_bp['arch'][abi][key].append(value) + else: + common_bp['arch'][abi][key] = [value] +@@ -316,7 +316,7 @@ def library_target_to_blueprint(target, build_info): + bps_for_abis = {} + blueprint_type = "" + for abi in abi_targets: +- if target not in build_info[abi].keys(): ++ if target not in list(build_info[abi].keys()): + bps_for_abis[abi] = {} + continue + +@@ -423,7 +423,7 @@ def action_target_to_blueprint(target, build_info): + bp_outputs = [] + for gn_output in target_info['outputs']: + output = os.path.basename(gn_output) +- if output in outputs_remap.keys(): ++ if output in list(outputs_remap.keys()): + output = outputs_remap[output] + bp_outputs.append(output) + +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/generate_entry_points.py b/src/3rdparty/chromium/third_party/angle/scripts/generate_entry_points.py +index 5bd82ab35..0f4db41c7 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/generate_entry_points.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/generate_entry_points.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2017 The ANGLE Project Authors. All rights reserved. 
+ # Use of this source code is governed by a BSD-style license that can be +@@ -738,7 +738,7 @@ def make_param(param_type, param_name): + + def just_the_type_packed(param, entry): + name = just_the_name(param) +- if entry.has_key(name): ++ if name in entry: + return entry[name] + else: + return just_the_type(param) +@@ -1239,7 +1239,7 @@ def write_export_files(entry_points, includes, source, lib_name, lib_description + + + def write_context_api_decls(template, decls, api): +- for ver in decls['core'].keys(): ++ for ver in list(decls['core'].keys()): + interface_lines = [] + + for i in decls['core'][ver]: +@@ -1263,9 +1263,9 @@ def write_context_api_decls(template, decls, api): + out.write(content) + out.close() + +- if 'exts' in decls.keys(): ++ if 'exts' in list(decls.keys()): + interface_lines = [] +- for annotation in decls['exts'].keys(): ++ for annotation in list(decls['exts'].keys()): + interface_lines.append("\\\n /* " + annotation + " */ \\\n\\") + + for extname in sorted(decls['exts'][annotation].keys()): +@@ -1437,7 +1437,7 @@ def format_write_param_type_to_stream_case(param_type): + + + def get_resource_id_types(all_param_types): +- return [t[:-2] for t in filter(lambda t: t.endswith("ID"), all_param_types)] ++ return [t[:-2] for t in [t for t in all_param_types if t.endswith("ID")]] + + + def format_resource_id_types(all_param_types): +@@ -1499,9 +1499,7 @@ def format_param_type_to_resource_id_type_case(param_type): + + + def format_param_type_resource_id_cases(all_param_types): +- id_types = filter( +- lambda t: t.endswith("ID") or t.endswith("IDConstPointer") or t.endswith("IDPointer"), +- all_param_types) ++ id_types = [t for t in all_param_types if t.endswith("ID") or t.endswith("IDConstPointer") or t.endswith("IDPointer")] + return "\n".join([format_param_type_to_resource_id_type_case(t) for t in id_types]) + + +@@ -1672,7 +1670,7 @@ def get_egl_exports(): + + egl.AddExtensionCommands(registry_xml.supported_egl_extensions, ['egl']) + +- for 
extension_name, ext_cmd_names in sorted(egl.ext_data.iteritems()): ++ for extension_name, ext_cmd_names in sorted(egl.ext_data.items()): + + if len(ext_cmd_names) == 0: + continue +@@ -1815,9 +1813,9 @@ def main(): + ] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +@@ -1927,7 +1925,7 @@ def main(): + + xml.AddExtensionCommands(registry_xml.supported_extensions, ['gles2', 'gles1']) + +- for extension_name, ext_cmd_names in sorted(xml.ext_data.iteritems()): ++ for extension_name, ext_cmd_names in sorted(xml.ext_data.items()): + extension_commands.extend(xml.ext_data[extension_name]) + + # Detect and filter duplicate extensions. +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/generate_loader.py b/src/3rdparty/chromium/third_party/angle/scripts/generate_loader.py +index 216b5cf3d..1bb0bfe01 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/generate_loader.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/generate_loader.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2018 The ANGLE Project Authors. All rights reserved. 
+ # Use of this source code is governed by a BSD-style license that can be +@@ -290,9 +290,9 @@ def main(): + ] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/generate_new_renderer.py b/src/3rdparty/chromium/third_party/angle/scripts/generate_new_renderer.py +index 9b26c5ee6..13e772dd0 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/generate_new_renderer.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/generate_new_renderer.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2015 The ANGLE Project Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/generate_stats.py b/src/3rdparty/chromium/third_party/angle/scripts/generate_stats.py +index 8924b7e05..528931a8a 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/generate_stats.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/generate_stats.py +@@ -110,7 +110,7 @@ import pickle + import re + import subprocess + import sys +-import urllib ++import urllib.request, urllib.parse, urllib.error + from google.auth.transport.requests import Request + from googleapiclient.discovery import build + from google_auth_oauthlib.flow import InstalledAppFlow +@@ -179,7 +179,7 @@ def get_latest_success_build_info(bot_name): + if 'build_name' not in info: + info['build_name'] = line.strip().split("'")[1] + # Remove the bot name and prepend the build link +- info['build_link'] = BUILD_LINK_PREFIX + urllib.quote( ++ info['build_link'] = BUILD_LINK_PREFIX + urllib.parse.quote( + info['build_name'].split(BOT_NAME_PREFIX)[1]) + if 'Created' in line: + # Example output of line with 'Created': +@@ -194,13 +194,13 @@ def 
get_latest_success_build_info(bot_name): + # ... + # "parent_got_angle_revision": "8cbd321cafa92ffbf0495e6d0aeb9e1a97940fee", + # ... +- info['angle_revision'] = filter(str.isalnum, line.split(':')[1]) ++ info['angle_revision'] = list(filter(str.isalnum, line.split(':')[1])) + if '"revision"' in line: + # Example output of line with chromium revision: + # ... + # "revision": "3b68405a27f1f9590f83ae07757589dba862f141", + # ... +- info['revision'] = filter(str.isalnum, line.split(':')[1]) ++ info['revision'] = list(filter(str.isalnum, line.split(':')[1])) + if 'build_name' not in info: + raise ValueError("Could not find build_name from bot '" + bot_name + "'") + return info +@@ -490,7 +490,7 @@ def get_headers(service, spreadsheet_id, sheet_names): + header_ranges = [sheet_name + '!A1:Z' for sheet_name in sheet_names] + LOGGER.debug("Called [spreadsheets.values().batchGet(spreadsheetId='" + spreadsheet_id + + ', ranges=' + str(header_ranges) + "')]") +- request = service.values().batchGet(spreadsheetId=spreadsheet_id, ranges=header_ranges) ++ request = list(service.values()).batchGet(spreadsheetId=spreadsheet_id, ranges=header_ranges) + response = request.execute() + headers = {} + for k, sheet_name in enumerate(sheet_names): +@@ -511,7 +511,7 @@ def batch_update_values(service, spreadsheet_id, data): + } + LOGGER.debug("Called [spreadsheets.values().batchUpdate(spreadsheetId='" + spreadsheet_id + + "', body=" + str(batch_update_values_request_body) + ')]') +- request = service.values().batchUpdate( ++ request = list(service.values()).batchUpdate( + spreadsheetId=spreadsheet_id, body=batch_update_values_request_body) + request.execute() + +@@ -576,7 +576,7 @@ def update_headers(service, spreadsheet_id, headers, info): + headers_stale = True + headers[sheet_name].append(req) + # Headers also must contain all the keys seen in this step +- for key in info[bot_name][step_name].keys(): ++ for key in list(info[bot_name][step_name].keys()): + if key not in 
headers[sheet_name]: + headers_stale = True + headers[sheet_name].append(key) +@@ -607,7 +607,7 @@ def append_values(service, spreadsheet_id, sheet_name, values): + "', body=" + str(append_values_request_body) + ", range='" + header_range + + "', insertDataOption='" + insert_data_option + "', valueInputOption='" + + value_input_option + "')]") +- request = service.values().append( ++ request = list(service.values()).append( + spreadsheetId=spreadsheet_id, + body=append_values_request_body, + range=header_range, +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/msvs_projects.py b/src/3rdparty/chromium/third_party/angle/scripts/msvs_projects.py +index f24a95216..4248fc79b 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/msvs_projects.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/msvs_projects.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2017 The ANGLE Project Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/perf_test_runner.py b/src/3rdparty/chromium/third_party/angle/scripts/perf_test_runner.py +index 989ffcfe5..4737db77e 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/perf_test_runner.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/perf_test_runner.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2015 The ANGLE Project Authors. All rights reserved. 
+ # Use of this source code is governed by a BSD-style license that can be +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/registry_xml.py b/src/3rdparty/chromium/third_party/angle/scripts/registry_xml.py +index e596e8ae7..0ba279eaa 100644 +--- a/src/3rdparty/chromium/third_party/angle/scripts/registry_xml.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/registry_xml.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2018 The ANGLE Project Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +@@ -216,7 +216,7 @@ class GLCommandNames: + def get_all_commands(self): + cmd_names = [] + # Combine all the version lists into a single list +- for version, version_cmd_names in sorted(self.command_names.iteritems()): ++ for version, version_cmd_names in sorted(self.command_names.items()): + cmd_names += version_cmd_names + + return cmd_names +@@ -314,7 +314,7 @@ class RegistryXML: + + self.ext_data[extension_name] = sorted(ext_cmd_names) + +- for extension_name, ext_cmd_names in sorted(self.ext_data.iteritems()): ++ for extension_name, ext_cmd_names in sorted(self.ext_data.items()): + + # Detect and filter duplicate extensions. + dupes = [] +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/remove_files.py b/src/3rdparty/chromium/third_party/angle/scripts/remove_files.py +index f52a1a816..6e7883b9e 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/remove_files.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/remove_files.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2019 The ANGLE Project Authors. All rights reserved. 
+ # Use of this source code is governed by a BSD-style license that can be +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/roll_chromium_deps.py b/src/3rdparty/chromium/third_party/angle/scripts/roll_chromium_deps.py +index d377894c5..8c28000ab 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/roll_chromium_deps.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/roll_chromium_deps.py +@@ -21,7 +21,7 @@ import platform + import re + import subprocess + import sys +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + + + def FindSrcDirPath(): +@@ -147,7 +147,7 @@ def _RunCommand(command, working_dir=None, ignore_exit_code=False, extra_env=Non + logging.debug('CMD: %s CWD: %s', ' '.join(command), working_dir) + env = os.environ.copy() + if extra_env: +- assert all(isinstance(value, str) for value in extra_env.values()) ++ assert all(isinstance(value, str) for value in list(extra_env.values())) + logging.debug('extra env: %s', extra_env) + env.update(extra_env) + p = subprocess.Popen( +@@ -209,7 +209,7 @@ def ReadRemoteCrCommit(revision): + + def ReadUrlContent(url): + """Connect to a remote host and read the contents. Returns a list of lines.""" +- conn = urllib2.urlopen(url) ++ conn = urllib.request.urlopen(url) + try: + return conn.readlines() + except IOError as e: +@@ -232,7 +232,7 @@ def GetMatchingDepsEntries(depsentry_dict, dir_path): + A list of DepsEntry objects. 
+ """ + result = [] +- for path, depsentry in depsentry_dict.iteritems(): ++ for path, depsentry in depsentry_dict.items(): + if path == dir_path: + result.append(depsentry) + else: +@@ -247,7 +247,7 @@ def BuildDepsentryDict(deps_dict): + result = {} + + def AddDepsEntries(deps_subdict): +- for path, dep in deps_subdict.iteritems(): ++ for path, dep in deps_subdict.items(): + if path in result: + continue + if not isinstance(dep, dict): +@@ -311,7 +311,7 @@ def CalculateChangedDeps(angle_deps, new_cr_deps): + result = [] + angle_entries = BuildDepsentryDict(angle_deps) + new_cr_entries = BuildDepsentryDict(new_cr_deps) +- for path, angle_deps_entry in angle_entries.iteritems(): ++ for path, angle_deps_entry in angle_entries.items(): + if path not in ANGLE_CHROMIUM_DEPS: + continue + +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/run_code_generation.py b/src/3rdparty/chromium/third_party/angle/scripts/run_code_generation.py +index 9477a9d39..a4ea98f8a 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/run_code_generation.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/run_code_generation.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2017 The ANGLE Project Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +@@ -157,12 +157,12 @@ def any_hash_dirty(name, filenames, new_hashes, old_hashes): + + def any_old_hash_missing(all_new_hashes, all_old_hashes): + result = False +- for file, old_hashes in all_old_hashes.iteritems(): ++ for file, old_hashes in all_old_hashes.items(): + if file not in all_new_hashes: + print('"%s" does not exist. Code gen dirty.' % file) + result = True + else: +- for name, _ in old_hashes.iteritems(): ++ for name, _ in old_hashes.items(): + if name not in all_new_hashes[file]: + print('Hash for %s is missing from "%s". Code gen is dirty.' 
% (name, file)) + result = True +@@ -197,7 +197,7 @@ def main(): + if len(sys.argv) > 1 and sys.argv[1] == '--verify-no-dirty': + verify_only = True + +- for name, script in sorted(generators.iteritems()): ++ for name, script in sorted(generators.items()): + info = auto_script(script) + fname = get_hash_file_name(name) + filenames = info['inputs'] + info['outputs'] + [script] +@@ -236,14 +236,14 @@ def main(): + sys.exit(1) + + # Update the output hashes again since they can be formatted. +- for name, script in sorted(generators.iteritems()): ++ for name, script in sorted(generators.items()): + info = auto_script(script) + fname = get_hash_file_name(name) + update_output_hashes(name, info['outputs'], all_new_hashes[fname]) + + os.chdir(script_dir) + +- for fname, new_hashes in all_new_hashes.iteritems(): ++ for fname, new_hashes in all_new_hashes.items(): + hash_fname = os.path.join(hash_dir, fname) + json.dump( + new_hashes, +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/trigger.py b/src/3rdparty/chromium/third_party/angle/scripts/trigger.py +index e8b0e5461..7c1fdaf56 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/trigger.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/trigger.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2019 The ANGLE Project Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +diff --git a/src/3rdparty/chromium/third_party/angle/scripts/update_canary_angle.py b/src/3rdparty/chromium/third_party/angle/scripts/update_canary_angle.py +index 5df46cc5f..28110631d 100755 +--- a/src/3rdparty/chromium/third_party/angle/scripts/update_canary_angle.py ++++ b/src/3rdparty/chromium/third_party/angle/scripts/update_canary_angle.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2016 The ANGLE Project Authors. All rights reserved. 
+ # Use of this source code is governed by a BSD-style license that can be +diff --git a/src/3rdparty/chromium/third_party/angle/src/common/Float16ToFloat32.py b/src/3rdparty/chromium/third_party/angle/src/common/Float16ToFloat32.py +index 592da65e7..a79e6154a 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/common/Float16ToFloat32.py ++++ b/src/3rdparty/chromium/third_party/angle/src/common/Float16ToFloat32.py +@@ -48,7 +48,7 @@ def convertOffset(i): + return 1024 + + +-print """// ++print("""// + // Copyright 2012 The ANGLE Project Authors. All rights reserved. + // Use of this source code is governed by a BSD-style license that can be + // found in the LICENSE file. +@@ -58,27 +58,27 @@ print """// + + namespace gl + { +-""" ++""") + +-print "const static unsigned g_mantissa[2048] = {" ++print("const static unsigned g_mantissa[2048] = {") + for i in range(0, 2048): +- print " %#010x," % convertMantissa(i) +-print "};\n" ++ print(" %#010x," % convertMantissa(i)) ++print("};\n") + +-print "const static unsigned g_exponent[64] = {" ++print("const static unsigned g_exponent[64] = {") + for i in range(0, 64): +- print " %#010x," % convertExponent(i) +-print "};\n" ++ print(" %#010x," % convertExponent(i)) ++print("};\n") + +-print "const static unsigned g_offset[64] = {" ++print("const static unsigned g_offset[64] = {") + for i in range(0, 64): +- print " %#010x," % convertOffset(i) +-print "};\n" ++ print(" %#010x," % convertOffset(i)) ++print("};\n") + +-print """float float16ToFloat32(unsigned short h) ++print("""float float16ToFloat32(unsigned short h) + { + unsigned i32 = g_mantissa[g_offset[h >> 10] + (h & 0x3ff)] + g_exponent[h >> 10]; + return bitCast(i32); + } + } +-""" ++""") +diff --git a/src/3rdparty/chromium/third_party/angle/src/common/gen_packed_gl_enums.py b/src/3rdparty/chromium/third_party/angle/src/common/gen_packed_gl_enums.py +index b72e9f62d..ffe96d05f 100644 +--- 
a/src/3rdparty/chromium/third_party/angle/src/common/gen_packed_gl_enums.py ++++ b/src/3rdparty/chromium/third_party/angle/src/common/gen_packed_gl_enums.py +@@ -34,12 +34,12 @@ def load_enums(path): + enums_dict = json.loads(map_file.read(), object_pairs_hook=OrderedDict) + + enums = [] +- for (enum_name, value_list) in enums_dict.iteritems(): ++ for (enum_name, value_list) in enums_dict.items(): + + values = [] + i = 0 + +- for (value_name, value_gl_name) in value_list.iteritems(): ++ for (value_name, value_gl_name) in value_list.items(): + values.append(EnumValue(value_name, value_gl_name, i)) + i += 1 + +@@ -245,9 +245,9 @@ def main(): + ] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +diff --git a/src/3rdparty/chromium/third_party/angle/src/common/gen_uniform_type_table.py b/src/3rdparty/chromium/third_party/angle/src/common/gen_uniform_type_table.py +index faa87e698..8d6fad128 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/common/gen_uniform_type_table.py ++++ b/src/3rdparty/chromium/third_party/angle/src/common/gen_uniform_type_table.py +@@ -124,7 +124,7 @@ def get_component_type(uniform_type): + + + def get_texture_type(uniform_type): +- for sampler_type, tex_type in texture_types.items(): ++ for sampler_type, tex_type in list(texture_types.items()): + if uniform_type.endswith(sampler_type): + return "GL_TEXTURE_" + tex_type + return "GL_NONE" +@@ -271,9 +271,9 @@ def main(): + outputs = ['uniform_type_info_autogen.cpp'] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +diff --git a/src/3rdparty/chromium/third_party/angle/src/compiler/generate_parser_tools.py 
b/src/3rdparty/chromium/third_party/angle/src/compiler/generate_parser_tools.py +index 0990e26d6..0a0257f3e 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/compiler/generate_parser_tools.py ++++ b/src/3rdparty/chromium/third_party/angle/src/compiler/generate_parser_tools.py +@@ -148,12 +148,12 @@ def generate_parser(basename, generate_header): + # Call flex and bison to generate the lexer and parser. + flex_result = run_flex(basename) + if flex_result != 0: +- print 'Failed to run flex. Error ' + str(flex_result) ++ print('Failed to run flex. Error ' + str(flex_result)) + return 1 + + bison_result = run_bison(basename, generate_header) + if bison_result != 0: +- print 'Failed to run bison. Error ' + str(bison_result) ++ print('Failed to run bison. Error ' + str(bison_result)) + return 2 + + return 0 +diff --git a/src/3rdparty/chromium/third_party/angle/src/compiler/translator/gen_builtin_symbols.py b/src/3rdparty/chromium/third_party/angle/src/compiler/translator/gen_builtin_symbols.py +index a128129a4..2463ba3e2 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/compiler/translator/gen_builtin_symbols.py ++++ b/src/3rdparty/chromium/third_party/angle/src/compiler/translator/gen_builtin_symbols.py +@@ -802,7 +802,7 @@ class UnmangledGroupedList: + class TType: + + def __init__(self, glsl_header_type): +- if isinstance(glsl_header_type, basestring): ++ if isinstance(glsl_header_type, str): + self.data = self.parse_type(glsl_header_type) + else: + self.data = glsl_header_type +@@ -1108,16 +1108,16 @@ def get_function_names(group, mangled_names, unmangled_names): + parameters = get_parameters(function_props) + mangled_names.append(get_function_mangled_name(function_name, parameters)) + if 'subgroups' in group: +- for subgroup_name, subgroup in group['subgroups'].iteritems(): ++ for subgroup_name, subgroup in group['subgroups'].items(): + get_function_names(subgroup, mangled_names, unmangled_names) + + + def get_variable_names(group, mangled_names): + 
if 'variables' in group: +- for variable_name, props in group['variables'].iteritems(): ++ for variable_name, props in group['variables'].items(): + mangled_names.append(variable_name) + if 'subgroups' in group: +- for subgroup_name, subgroup in group['subgroups'].iteritems(): ++ for subgroup_name, subgroup in group['subgroups'].items(): + get_variable_names(subgroup, mangled_names) + + +@@ -1289,9 +1289,9 @@ def gen_function_variants(function_props): + return function_variants + + # If we have a normal gentype then we're generating variants for different sizes of vectors. +- sizes = range(1, 5) ++ sizes = list(range(1, 5)) + if 'vec' in gen_type: +- sizes = range(2, 5) ++ sizes = list(range(2, 5)) + for size in sizes: + variant_props = function_props.copy() + variant_parameters = [] +@@ -1457,7 +1457,7 @@ def process_function_group( + unmangled_script_generated_hash_tests, mangled_builtins) + + if 'subgroups' in group: +- for subgroup_name, subgroup in group['subgroups'].iteritems(): ++ for subgroup_name, subgroup in group['subgroups'].items(): + process_function_group( + group_name + subgroup_name, subgroup, parameter_declarations, name_declarations, + unmangled_function_if_statements, defined_function_variants, +@@ -1481,7 +1481,7 @@ def prune_parameters_arrays(parameter_declarations, function_declarations): + parameter_variable_name_replacements = {} + used_param_variable_names = set() + for param_variable_name, param_declaration in sorted( +- parameter_declarations.iteritems(), key=lambda item: -len(item[0])): ++ iter(parameter_declarations.items()), key=lambda item: -len(item[0])): + replaced = False + for used in used_param_variable_names: + if used.startswith(param_variable_name): +@@ -1491,13 +1491,13 @@ def prune_parameters_arrays(parameter_declarations, function_declarations): + if not replaced: + used_param_variable_names.add(param_variable_name) + +- for i in xrange(len(function_declarations)): +- for replaced, replacement in 
parameter_variable_name_replacements.iteritems(): ++ for i in range(len(function_declarations)): ++ for replaced, replacement in parameter_variable_name_replacements.items(): + function_declarations[i] = function_declarations[i].replace( + 'BuiltInParameters::' + replaced + ',', 'BuiltInParameters::' + replacement + ',') + + return [ +- value for key, value in parameter_declarations.iteritems() ++ value for key, value in parameter_declarations.items() + if key in used_param_variable_names + ] + +@@ -1510,7 +1510,7 @@ def process_single_variable_group(shader_type, group_name, group, builtin_id_dec + global id_counter + if 'variables' not in group: + return +- for variable_name, props in group['variables'].iteritems(): ++ for variable_name, props in group['variables'].items(): + essl_level = props['essl_level'] if 'essl_level' in props else None + glsl_level = props['glsl_level'] if 'glsl_level' in props else None + template_args = { +@@ -1559,7 +1559,7 @@ def process_single_variable_group(shader_type, group_name, group, builtin_id_dec + template_args['fields'] = 'fields_{name_with_suffix}'.format(**template_args) + init_member_variables.append( + ' TFieldList *{fields} = new TFieldList();'.format(**template_args)) +- for field_name, field_type in props['fields'].iteritems(): ++ for field_name, field_type in props['fields'].items(): + template_args['field_name'] = field_name + template_args['field_type'] = TType(field_type).get_dynamic_type_string() + template_name_declaration = 'constexpr const ImmutableString {field_name}("{field_name}");' +@@ -1671,7 +1671,7 @@ def process_variable_group(shader_type, group_name, group, builtin_id_declaratio + get_variable_definitions, script_generated_hash_tests) + + if 'subgroups' in group: +- for subgroup_name, subgroup in group['subgroups'].iteritems(): ++ for subgroup_name, subgroup in group['subgroups'].items(): + process_variable_group( + shader_type, subgroup_name, subgroup, builtin_id_declarations, + 
builtin_id_definitions, name_declarations, init_member_variables, +@@ -1746,15 +1746,15 @@ def generate_files(essl_only, args, functions_txt_filename, variables_json_filen + # This script uses a perfect hash function to avoid dealing with collisions + mangled_names = [] + unmangled_names = [] +- for group_name, group in parsed_functions.iteritems(): ++ for group_name, group in parsed_functions.items(): + get_function_names(group, mangled_names, unmangled_names) +- for group_name, group in parsed_variables.iteritems(): ++ for group_name, group in parsed_variables.items(): + get_variable_names(group, mangled_names) + + # Hashing mangled names + mangled_names = list(dict.fromkeys(mangled_names)) + num_mangled_names = len(mangled_names) +- mangled_names_dict = dict(zip(mangled_names, range(0, len(mangled_names)))) ++ mangled_names_dict = dict(list(zip(mangled_names, list(range(0, len(mangled_names)))))) + # Generate the perfect hash function + f1, f2, mangled_G = generate_hash(mangled_names_dict, Hash2) + mangled_hashfn = HashFunction(f1, f2, mangled_G) +@@ -1766,7 +1766,7 @@ def generate_files(essl_only, args, functions_txt_filename, variables_json_filen + # Hashing unmangled names + unmangled_names = list(dict.fromkeys(unmangled_names)) + num_unmangled_names = len(unmangled_names) +- unmangled_names_dict = dict(zip(unmangled_names, range(0, len(unmangled_names)))) ++ unmangled_names_dict = dict(list(zip(unmangled_names, list(range(0, len(unmangled_names)))))) + # Generate the perfect hash function + f1, f2, unmangled_G = generate_hash(unmangled_names_dict, Hash2) + unmangled_hashfn = HashFunction(f1, f2, unmangled_G) +@@ -1775,7 +1775,7 @@ def generate_files(essl_only, args, functions_txt_filename, variables_json_filen + # Array for querying unmangled builtins + unmangled_function_if_statements = UnmangledGroupedList(unmangled_hashfn, num_unmangled_names) + +- for group_name, group in parsed_functions.iteritems(): ++ for group_name, group in parsed_functions.items(): 
+ process_function_group( + group_name, group, parameter_declarations, name_declarations, + unmangled_function_if_statements, defined_function_variants, builtin_id_declarations, +@@ -1785,7 +1785,7 @@ def generate_files(essl_only, args, functions_txt_filename, variables_json_filen + + parameter_declarations = prune_parameters_arrays(parameter_declarations, function_declarations) + +- for group_name, group in parsed_variables.iteritems(): ++ for group_name, group in parsed_variables.items(): + process_variable_group('NONE', group_name, group, builtin_id_declarations, + builtin_id_definitions, name_declarations, init_member_variables, + get_variable_declarations, mangled_builtins, +@@ -1846,9 +1846,9 @@ def generate_files(essl_only, args, functions_txt_filename, variables_json_filen + 'num_mangled_names': + num_mangled_names, + 'script_generated_hash_tests': +- '\n'.join(script_generated_hash_tests.iterkeys()), ++ '\n'.join(iter(script_generated_hash_tests.keys())), + 'unmangled_script_generated_hash_tests': +- '\n'.join(unmangled_script_generated_hash_tests.iterkeys()), ++ '\n'.join(iter(unmangled_script_generated_hash_tests.keys())), + 'mangled_S1': + str(mangled_S1).replace('[', ' ').replace(']', ' '), + 'mangled_S2': +@@ -1939,9 +1939,9 @@ def main(): + ] + + if args.auto_script_command == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif args.auto_script_command == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +diff --git a/src/3rdparty/chromium/third_party/angle/src/compiler/translator/gen_emulated_builtin_function_tables.py b/src/3rdparty/chromium/third_party/angle/src/compiler/translator/gen_emulated_builtin_function_tables.py +index 958ba2c0e..7d2fbc5d8 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/compiler/translator/gen_emulated_builtin_function_tables.py ++++ 
b/src/3rdparty/chromium/third_party/angle/src/compiler/translator/gen_emulated_builtin_function_tables.py +@@ -123,9 +123,9 @@ def main(): + outputs = [hlsl_fname] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_copy_conversion_table.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_copy_conversion_table.py +index 5ce915a7d..48c0fe43b 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_copy_conversion_table.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_copy_conversion_table.py +@@ -78,9 +78,9 @@ def main(): + outputs = [out_file_name] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +@@ -90,7 +90,7 @@ def main(): + + format_map = {} + +- for description, data in json_data.iteritems(): ++ for description, data in json_data.items(): + for texture_format, framebuffer_format in data: + if texture_format not in format_map: + format_map[texture_format] = [] +@@ -98,7 +98,7 @@ def main(): + + texture_format_cases = "" + +- for texture_format, framebuffer_formats in sorted(format_map.iteritems()): ++ for texture_format, framebuffer_formats in sorted(format_map.items()): + texture_format_cases += parse_texture_format_case(texture_format, framebuffer_formats) + + with open(out_file_name, 'wt') as out_file: +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_format_map.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_format_map.py +index a35606846..0a37e0695 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_format_map.py ++++ 
b/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_format_map.py +@@ -118,7 +118,7 @@ def parse_type_case(type, result): + + def parse_format_case(format, type_map): + type_cases = "" +- for type, internal_format in sorted(type_map.iteritems()): ++ for type, internal_format in sorted(type_map.items()): + type_cases += parse_type_case(type, internal_format) + return template_format_case.format(format=format, type_cases=type_cases) + +@@ -133,9 +133,9 @@ def main(): + outputs = ['format_map_autogen.cpp'] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +@@ -147,12 +147,12 @@ def main(): + + format_cases = "" + +- for format, type_map in sorted(format_map.iteritems()): ++ for format, type_map in sorted(format_map.items()): + format_cases += parse_format_case(format, type_map) + + combo_data_file = 'es3_format_type_combinations.json' + es3_combo_data = angle_format.load_json(combo_data_file) +- combo_data = [combo for sublist in es3_combo_data.values() for combo in sublist] ++ combo_data = [combo for sublist in list(es3_combo_data.values()) for combo in sublist] + + types = set() + formats = set() +@@ -180,9 +180,9 @@ def main(): + + es3_combo_cases = "" + +- for format, type_combos in combos.iteritems(): ++ for format, type_combos in combos.items(): + this_type_cases = "" +- for type, combos in type_combos.iteritems(): ++ for type, combos in type_combos.items(): + internal_format_cases = "" + for internal_format in combos: + internal_format_cases += " case " + internal_format + ":\n" +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_overlay_fonts.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_overlay_fonts.py +index 22a99de3f..6f0fd4d5d 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_overlay_fonts.py ++++ 
b/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_overlay_fonts.py +@@ -28,7 +28,7 @@ out_file_cpp = 'Overlay_font_autogen.cpp' + out_file_h = 'Overlay_font_autogen.h' + font_file = 'overlay/DejaVuSansMono-Bold.ttf' + +-template_out_file_h = u"""// GENERATED FILE - DO NOT EDIT. ++template_out_file_h = """// GENERATED FILE - DO NOT EDIT. + // Generated by {script_name} using {font_file}. + // + // Copyright {copyright_year} The ANGLE Project Authors. All rights reserved. +@@ -58,7 +58,7 @@ constexpr int kFontImageHeight = {max_font_height} * kFontCharactersPerCol; + + """ + +-template_out_file_cpp = u"""// GENERATED FILE - DO NOT EDIT. ++template_out_file_cpp = """// GENERATED FILE - DO NOT EDIT. + // Generated by {script_name} using images from {font_file}. + // + // Copyright {copyright_year} The ANGLE Project Authors. All rights reserved. +@@ -137,7 +137,7 @@ void OverlayState::initFontData(uint8_t *fontData) const + }} // namespace gl + """ + +-template_get_font_layer_pixel = u"""case {layer}: ++template_get_font_layer_pixel = """case {layer}: + return GetFontLayerPixel({font_image}, x, y); + """ + +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_overlay_widgets.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_overlay_widgets.py +index f374dc8e3..6b1959905 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_overlay_widgets.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/gen_overlay_widgets.py +@@ -18,7 +18,7 @@ OUT_HEADER_FILE_NAME = 'Overlay_autogen.h' + + IN_JSON_FILE_NAME = 'overlay_widgets.json' + +-OUT_SOURCE_FILE_TEMPLATE = u"""// GENERATED FILE - DO NOT EDIT. ++OUT_SOURCE_FILE_TEMPLATE = """// GENERATED FILE - DO NOT EDIT. + // Generated by {script_name} using data from {input_file_name}. + // + // Copyright {copyright_year} The ANGLE Project Authors. All rights reserved. 
+@@ -60,7 +60,7 @@ void Overlay::initOverlayWidgets() + + """ + +-OUT_HEADER_FILE_TEMPLATE = u"""// GENERATED FILE - DO NOT EDIT. ++OUT_HEADER_FILE_TEMPLATE = """// GENERATED FILE - DO NOT EDIT. + // Generated by {script_name} using data from {input_file_name}. + // + // Copyright {copyright_year} The ANGLE Project Authors. All rights reserved. +@@ -86,7 +86,7 @@ enum class WidgetId + }} // namespace gl + """ + +-WIDGET_INIT_TEMPLATE = u"""{{ ++WIDGET_INIT_TEMPLATE = """{{ + const int32_t fontSize = GetFontSize({font_size}, kLargeFont); + const int32_t offsetX = {offset_x}; + const int32_t offsetY = {offset_y}; +@@ -161,7 +161,7 @@ class OverlayWidget: + + def is_negative_coord(coords, axis, widgets_so_far): + +- if isinstance(coords[axis], unicode): ++ if isinstance(coords[axis], str): + coord_split = coords[axis].split('.') + # The coordinate is in the form other_widget.edge.mode + # We simply need to know if other_widget's coordinate is negative or not. +@@ -197,7 +197,7 @@ def get_offset_helper(widget, axis, smaller_coord_side): + # The case for the Y axis is similar, with the edge values being top or bottom. 
+ + coord = widget.coords[axis] +- if not isinstance(coord, unicode): ++ if not isinstance(coord, str): + is_left = coord >= 0 + return coord, is_left + +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/angle_format.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/angle_format.py +index 88e3236f0..97f2222b2 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/angle_format.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/angle_format.py +@@ -53,7 +53,7 @@ def load_with_override(override_path): + results = load_without_override() + overrides = load_json(override_path) + +- for k, v in overrides.iteritems(): ++ for k, v in overrides.items(): + results[k] = v + + return results +@@ -61,7 +61,7 @@ def load_with_override(override_path): + + def get_all_angle_formats(): + map_path = get_angle_format_map_abs_path() +- return load_inverse_table(map_path).keys() ++ return list(load_inverse_table(map_path).keys()) + + + def get_component_type(format_id): +@@ -95,7 +95,7 @@ def get_component_type(format_id): + + def get_channel_tokens(format_id): + r = re.compile(r'([' + kChannels + '][\d]+)') +- return filter(r.match, r.split(format_id)) ++ return list(filter(r.match, r.split(format_id))) + + + def get_channels(format_id): +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/d3d/d3d11/gen_blit11helper.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/d3d/d3d11/gen_blit11helper.py +index 4be6a253a..722aeb318 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/d3d/d3d11/gen_blit11helper.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/d3d/d3d11/gen_blit11helper.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2018 The ANGLE Project Authors. All rights reserved. 
+ # Use of this source code is governed by a BSD-style license that can be +@@ -322,9 +322,9 @@ def main(): + outputs = ['Blit11Helper_autogen.inc', 'd3d11_blit_shaders_autogen.gni'] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/d3d/d3d11/gen_texture_format_table.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/d3d/d3d11/gen_texture_format_table.py +index b1c07f727..8c206c201 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/d3d/d3d11/gen_texture_format_table.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/d3d/d3d11/gen_texture_format_table.py +@@ -85,9 +85,9 @@ def get_swizzle_format_id(internal_format, angle_format): + internal_format) + + bits = angle_format['bits'] +- max_component_bits = max(bits.itervalues()) ++ max_component_bits = max(bits.values()) + channels_different = not all( +- [component_bits == bits.itervalues().next() for component_bits in bits.itervalues()]) ++ [component_bits == next(iter(bits.values())) for component_bits in bits.values()]) + + # The format itself can be used for swizzles if it can be accessed as a render target and + # sampled and the bit count for all 4 channels is the same. +@@ -197,7 +197,7 @@ def json_to_table_data(internal_format, format_name, prefix, json): + "condition": prefix, + } + +- for k, v in json.iteritems(): ++ for k, v in json.items(): + parsed[k] = v + + # Derived values. 
+@@ -218,15 +218,15 @@ def parse_json_angle_format_case(format_name, angle_format, json_data): + support_test = None + fallback = None + +- for k, v in angle_format.iteritems(): ++ for k, v in angle_format.items(): + if k == "FL10Plus": + assert support_test is None + support_test = "OnlyFL10Plus(deviceCaps)" +- for k2, v2 in v.iteritems(): ++ for k2, v2 in v.items(): + supported_case[k2] = v2 + elif k == "FL9_3": + split = True +- for k2, v2 in v.iteritems(): ++ for k2, v2 in v.items(): + unsupported_case[k2] = v2 + elif k == "supportTest": + assert support_test is None +@@ -251,7 +251,7 @@ def parse_json_angle_format_case(format_name, angle_format, json_data): + def parse_json_into_switch_angle_format_string(json_map, json_data): + table_data = '' + +- for internal_format, format_name in sorted(json_map.iteritems()): ++ for internal_format, format_name in sorted(json_map.items()): + + if format_name not in json_data: + continue +@@ -290,9 +290,9 @@ def main(): + outputs = ['texture_format_table_autogen.cpp'] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_angle_format_table.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_angle_format_table.py +index df8b6c4ba..41dd943cb 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_angle_format_table.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_angle_format_table.py +@@ -284,7 +284,7 @@ def json_to_table_data(format_id, json, angle_to_gl): + "fastCopyFunctions": "NoCopyFunctions", + } + +- for k, v in json.iteritems(): ++ for k, v in json.items(): + parsed[k] = v + + if "glInternalFormat" not in parsed: +@@ -400,9 +400,9 @@ def main(): + outputs = ['Format_table_autogen.cpp', 
'FormatID_autogen.h'] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +@@ -412,7 +412,7 @@ def main(): + angle_to_gl = angle_format.load_inverse_table('angle_format_map.json') + data_source_name = 'angle_format_data.json' + json_data = angle_format.load_json(data_source_name) +- all_angle = angle_to_gl.keys() ++ all_angle = list(angle_to_gl.keys()) + + angle_format_cases = parse_angle_format_table(all_angle, json_data, angle_to_gl) + switch_data = gen_map_switch_string(gl_to_angle) +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_dxgi_format_table.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_dxgi_format_table.py +index 0342726c9..0eb6e43fe 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_dxgi_format_table.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_dxgi_format_table.py +@@ -10,6 +10,7 @@ + from datetime import date + import sys + import angle_format ++from functools import reduce + + template_cpp = """// GENERATED FILE - DO NOT EDIT. + // Generated by {script_name} using data from {data_source_name}. 
+@@ -94,9 +95,9 @@ def main(): + outputs = ['dxgi_format_map_autogen.cpp'] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +@@ -120,15 +121,15 @@ def main(): + + all_angle = angle_format.get_all_angle_formats() + +- for dxgi_format, a_format in sorted(dxgi_map.iteritems()): ++ for dxgi_format, a_format in sorted(dxgi_map.items()): + +- found = [ctype in dxgi_format for ctype in types.keys()] ++ found = [ctype in dxgi_format for ctype in list(types.keys())] + count = reduce((lambda a, b: int(a) + int(b)), found) + + component_type = 'GL_NONE' + + if count == 1: +- gltype = next(gltype for ctype, gltype in types.iteritems() if ctype in dxgi_format) ++ gltype = next(gltype for ctype, gltype in types.items() if ctype in dxgi_format) + component_cases += format_case(dxgi_format, gltype) + else: + component_cases += undefined_case(dxgi_format) +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_dxgi_support_tables.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_dxgi_support_tables.py +index 96287b14e..c805836e8 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_dxgi_support_tables.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_dxgi_support_tables.py +@@ -183,7 +183,7 @@ def do_format(format_data): + 'mipAutoGen': macro_prefix + 'MIPGEN' + } + +- for format_name, format_support in sorted(format_data.iteritems()): ++ for format_name, format_support in sorted(format_data.items()): + + always_supported = set() + never_supported = set() +@@ -198,7 +198,7 @@ def do_format(format_data): + fl_10_0_check_10_1_supported = set() + fl_10_0_check_11_0_supported = set() + +- for json_flag, support in format_support.iteritems(): ++ for json_flag, support in format_support.items(): + + d3d_flag = 
[json_flag_to_d3d[json_flag]] + +@@ -306,9 +306,9 @@ def main(): + outputs = ['dxgi_support_table_autogen.cpp'] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_load_functions_table.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_load_functions_table.py +index e0667befd..2d6460371 100755 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_load_functions_table.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gen_load_functions_table.py +@@ -113,7 +113,7 @@ def get_load_func(func_name, type_functions): + snippet += "{\n" + snippet += " switch (type)\n" + snippet += " {\n" +- for gl_type, load_function in sorted(type_functions.iteritems()): ++ for gl_type, load_function in sorted(type_functions.items()): + snippet += " case " + gl_type + ":\n" + requiresConversion = str('LoadToNative<' not in load_function).lower() + snippet += " return LoadImageFunctionInfo(" + load_function + ", " + requiresConversion + ");\n" +@@ -136,14 +136,14 @@ def get_unknown_load_func(angle_to_type_map, internal_format): + def parse_json(json_data): + table_data = '' + load_functions_data = '' +- for internal_format, angle_to_type_map in sorted(json_data.iteritems()): ++ for internal_format, angle_to_type_map in sorted(json_data.items()): + + s = ' ' + + table_data += s + 'case ' + internal_format + ':\n' + + do_switch = len( +- angle_to_type_map) > 1 or angle_to_type_map.keys()[0] != angle_format_unknown ++ angle_to_type_map) > 1 or list(angle_to_type_map.keys())[0] != angle_format_unknown + + if do_switch: + table_data += s + '{\n' +@@ -152,7 +152,7 @@ def parse_json(json_data): + table_data += s + '{\n' + s += ' ' + +- for angle_format, type_functions in 
sorted(angle_to_type_map.iteritems()): ++ for angle_format, type_functions in sorted(angle_to_type_map.items()): + + if angle_format == angle_format_unknown: + continue +@@ -164,7 +164,7 @@ def parse_json(json_data): + table_data += s + ' return ' + func_name + ';\n' + + if angle_format_unknown in angle_to_type_map: +- for gl_type, load_function in angle_to_type_map[angle_format_unknown].iteritems(): ++ for gl_type, load_function in angle_to_type_map[angle_format_unknown].items(): + if gl_type not in type_functions: + type_functions[gl_type] = load_function + +@@ -202,9 +202,9 @@ def main(): + outputs = ['load_functions_table_autogen.cpp'] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gl/generate_gl_dispatch_table.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gl/generate_gl_dispatch_table.py +index 455d437da..e30c793d5 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gl/generate_gl_dispatch_table.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/gl/generate_gl_dispatch_table.py +@@ -260,9 +260,9 @@ def main(): + ] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +@@ -330,7 +330,7 @@ def main(): + # Used later in the NULL bindings. 
+ all_entry_points = [] + +- for comment, entry_points in json_data.iteritems(): ++ for comment, entry_points in json_data.items(): + for entry_point_no_prefix in entry_points: + entry_point = "gl" + entry_point_no_prefix + +@@ -397,7 +397,7 @@ def main(): + raise Exception('Entry point ' + entry_point + ' not found in the xml.') + + table_data = [] +- for comment, entry_points in sorted(json_data.iteritems()): ++ for comment, entry_points in sorted(json_data.items()): + formatted = [" // " + comment] + formatted += [format_ep_decl(entry_point) for entry_point in sorted(entry_points)] + +@@ -414,25 +414,25 @@ def main(): + out.write(dispatch_table_header) + + gl_data = [] +- for gl_required, entry_points in sorted(gl_requirements.iteritems()): ++ for gl_required, entry_points in sorted(gl_requirements.items()): + gl_data.append(format_requirements_lines(gl_required, entry_points)) + + gl_extensions_data = [] +- for extension, entry_points in sorted(gl_extension_requirements.iteritems()): ++ for extension, entry_points in sorted(gl_extension_requirements.items()): + gl_extensions_data.append( + format_extension_requirements_lines(extension, entry_points, "gl")) + + gles2_data = [] +- for gles2_required, entry_points in sorted(gles2_requirements.iteritems()): ++ for gles2_required, entry_points in sorted(gles2_requirements.items()): + gles2_data.append(format_requirements_lines(gles2_required, entry_points)) + + gles2_extensions_data = [] +- for extension, entry_points in sorted(gles2_extension_requirements.iteritems()): ++ for extension, entry_points in sorted(gles2_extension_requirements.items()): + gles2_extensions_data.append( + format_extension_requirements_lines(extension, entry_points, "gles2")) + + both_extensions_data = [] +- for extension, entry_points in sorted(both_extension_requirements.iteritems()): ++ for extension, entry_points in sorted(both_extension_requirements.items()): + both_extensions_data.append( + 
format_extension_requirements_lines(extension, entry_points, "gles2|gl")) + +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/metal/gen_mtl_format_table.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/metal/gen_mtl_format_table.py +index 2b4e3d3dc..af3f4d889 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/metal/gen_mtl_format_table.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/metal/gen_mtl_format_table.py +@@ -471,9 +471,9 @@ def main(): + outputs = ['mtl_format_table_autogen.mm'] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/metal/shaders/gen_mtl_internal_shaders.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/metal/shaders/gen_mtl_internal_shaders.py +index 40fc73fcc..123872191 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/metal/shaders/gen_mtl_internal_shaders.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/metal/shaders/gen_mtl_internal_shaders.py +@@ -224,9 +224,9 @@ def main(): + ] + os_specific_autogen_files + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +@@ -239,7 +239,7 @@ def main(): + + # -------- Generate shader constants ----------- + angle_to_gl = angle_format.load_inverse_table('../../angle_format_map.json') +- shader_formats_autogen = gen_shader_enums_code(angle_to_gl.keys()) ++ shader_formats_autogen = gen_shader_enums_code(list(angle_to_gl.keys())) + shader_autogen_header = boilerplate_code + shader_formats_autogen + + with 
open('format_autogen.h', 'wt') as out_file: +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/vulkan/gen_vk_format_table.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/vulkan/gen_vk_format_table.py +index 476130422..c3a74c091 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/vulkan/gen_vk_format_table.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/vulkan/gen_vk_format_table.py +@@ -107,9 +107,9 @@ def verify_vk_map_keys(angle_to_gl, vk_json_data): + + no_error = True + for table in ["map", "fallbacks"]: +- for angle_format in vk_json_data[table].keys(): +- if not angle_format in angle_to_gl.keys(): +- print "Invalid format " + angle_format + " in vk_format_map.json in " + table ++ for angle_format in list(vk_json_data[table].keys()): ++ if not angle_format in list(angle_to_gl.keys()): ++ print("Invalid format " + angle_format + " in vk_format_map.json in " + table) + no_error = False + + return no_error +@@ -218,9 +218,9 @@ def main(): + outputs = [out_file_name] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +@@ -233,7 +233,7 @@ def main(): + return 1 + + vk_cases = [ +- gen_format_case(angle, gl, vk_json_data) for angle, gl in sorted(angle_to_gl.iteritems()) ++ gen_format_case(angle, gl, vk_json_data) for angle, gl in sorted(angle_to_gl.items()) + ] + + output_cpp = template_table_autogen_cpp.format( +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/vulkan/gen_vk_internal_shaders.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/vulkan/gen_vk_internal_shaders.py +index b9b54f6ac..82148161f 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/vulkan/gen_vk_internal_shaders.py ++++ 
b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/vulkan/gen_vk_internal_shaders.py +@@ -28,7 +28,7 @@ is_windows = platform.system() == 'Windows' + is_linux = platform.system() == 'Linux' + + # Templates for the generated files: +-template_shader_library_cpp = u"""// GENERATED FILE - DO NOT EDIT. ++template_shader_library_cpp = """// GENERATED FILE - DO NOT EDIT. + // Generated by {script_name} using data from {input_file_name} + // + // Copyright {copyright_year} The ANGLE Project Authors. All rights reserved. +@@ -117,7 +117,7 @@ void ShaderLibrary::destroy(VkDevice device) + }} // namespace rx + """ + +-template_shader_library_h = u"""// GENERATED FILE - DO NOT EDIT. ++template_shader_library_h = """// GENERATED FILE - DO NOT EDIT. + // Generated by {script_name} using data from {input_file_name} + // + // Copyright {copyright_year} The ANGLE Project Authors. All rights reserved. +@@ -160,7 +160,7 @@ class ShaderLibrary final : angle::NonCopyable + #endif // LIBANGLE_RENDERER_VULKAN_VK_INTERNAL_SHADERS_AUTOGEN_H_ + """ + +-template_shader_includes_gni = u"""# GENERATED FILE - DO NOT EDIT. ++template_shader_includes_gni = """# GENERATED FILE - DO NOT EDIT. + # Generated by {script_name} using data from {input_file_name} + # + # Copyright {copyright_year} The ANGLE Project Authors. All rights reserved. +@@ -175,7 +175,7 @@ angle_vulkan_internal_shaders = [ + ] + """ + +-template_spirv_blob_inc = u"""// GENERATED FILE - DO NOT EDIT. ++template_spirv_blob_inc = """// GENERATED FILE - DO NOT EDIT. + // Generated by {script_name}. + // + // Copyright {copyright_year} The ANGLE Project Authors. All rights reserved. +@@ -267,7 +267,7 @@ def get_shader_variations(shader): + flags = {} + enums = [] + +- for key, value in variations.iteritems(): ++ for key, value in variations.items(): + if key == "Description": + continue + elif key == "Flags": +@@ -468,11 +468,11 @@ class CompileQueue: + # [ name, arg1, ..., argN ]. 
In that case, name is option[0] and option[1:] are extra arguments + # that need to be passed to glslang_validator for this variation. + def get_variation_name(option): +- return option if isinstance(option, unicode) else option[0] ++ return option if isinstance(option, str) else option[0] + + + def get_variation_args(option): +- return [] if isinstance(option, unicode) else option[1:] ++ return [] if isinstance(option, str) else option[1:] + + + def compile_variation(glslang_path, compile_queue, shader_file, shader_basename, flags, enums, +diff --git a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/vulkan/gen_vk_mandatory_format_support_table.py b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/vulkan/gen_vk_mandatory_format_support_table.py +index da1cf4778..da2102f1a 100644 +--- a/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/vulkan/gen_vk_mandatory_format_support_table.py ++++ b/src/3rdparty/chromium/third_party/angle/src/libANGLE/renderer/vulkan/gen_vk_mandatory_format_support_table.py +@@ -101,9 +101,9 @@ def main(): + outputs = [out_file_name] + + if sys.argv[1] == 'inputs': +- print ','.join(inputs) ++ print(','.join(inputs)) + elif sys.argv[1] == 'outputs': +- print ','.join(outputs) ++ print(','.join(outputs)) + else: + print('Invalid script parameters') + return 1 +diff --git a/src/3rdparty/chromium/third_party/angle/third_party/rapidjson/src/bin/jsonschema/bin/jsonschema_suite b/src/3rdparty/chromium/third_party/angle/third_party/rapidjson/src/bin/jsonschema/bin/jsonschema_suite +index 96108c86b..2f6faa20a 100755 +--- a/src/3rdparty/chromium/third_party/angle/third_party/rapidjson/src/bin/jsonschema/bin/jsonschema_suite ++++ b/src/3rdparty/chromium/third_party/angle/third_party/rapidjson/src/bin/jsonschema/bin/jsonschema_suite +@@ -10,7 +10,7 @@ except ImportError: + The argparse library could not be imported. jsonschema_suite requires + either Python 2.7 or for you to install argparse. 
You can do so by + running `pip install argparse`, `easy_install argparse` or by +- downloading argparse and running `python2.6 setup.py install`. ++ downloading argparse and running `python3.6 setup.py install`. + + See https://pypi.python.org/pypi/argparse for details. + """.strip("\n"))) +diff --git a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-headers/src/registry/cgenerator.py b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-headers/src/registry/cgenerator.py +index 11d54683b..72e63a204 100644 +--- a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-headers/src/registry/cgenerator.py ++++ b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-headers/src/registry/cgenerator.py +@@ -300,12 +300,12 @@ class COutputGenerator(OutputGenerator): + + # Everyone with an explicit mayalias="true" + self.may_alias = set(typeName +- for typeName, data in self.registry.typedict.items() ++ for typeName, data in list(self.registry.typedict.items()) + if data.elem.get('mayalias') == 'true') + + # Every type mentioned in some other type's parentstruct attribute. 
+ parent_structs = (otherType.elem.get('parentstruct') +- for otherType in self.registry.typedict.values()) ++ for otherType in list(self.registry.typedict.values())) + self.may_alias.update(set(x for x in parent_structs + if x is not None)) + return typeName in self.may_alias +diff --git a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-headers/src/registry/generator.py b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-headers/src/registry/generator.py +index c6e58720e..72b6d3f9d 100644 +--- a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-headers/src/registry/generator.py ++++ b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-headers/src/registry/generator.py +@@ -5,7 +5,7 @@ + # SPDX-License-Identifier: Apache-2.0 + """Base class for source/header/doc generators, as well as some utility functions.""" + +-from __future__ import unicode_literals ++ + + import io + import os +@@ -324,7 +324,7 @@ class OutputGenerator: + declared first when emitting this enum.""" + name = elem.get('name') + numVal = None +- if 'value' in elem.keys(): ++ if 'value' in list(elem.keys()): + value = elem.get('value') + # print('About to translate value =', value, 'type =', type(value)) + if needsNum: +@@ -336,7 +336,7 @@ class OutputGenerator: + # value += enuminfo.type + self.logMsg('diag', 'Enum', name, '-> value [', numVal, ',', value, ']') + return [numVal, value] +- if 'bitpos' in elem.keys(): ++ if 'bitpos' in list(elem.keys()): + value = elem.get('bitpos') + bitpos = int(value, 0) + numVal = 1 << bitpos +@@ -345,13 +345,13 @@ class OutputGenerator: + value = value + 'ULL' + self.logMsg('diag', 'Enum', name, '-> bitpos [', numVal, ',', value, ']') + return [numVal, value] +- if 'offset' in elem.keys(): ++ if 'offset' in list(elem.keys()): + # Obtain values in the mapping from the attributes + enumNegative = False + offset = int(elem.get('offset'), 0) + extnumber = int(elem.get('extnumber'), 0) + extends = elem.get('extends') +- if 'dir' 
in elem.keys(): ++ if 'dir' in list(elem.keys()): + enumNegative = True + self.logMsg('diag', 'Enum', name, 'offset =', offset, + 'extnumber =', extnumber, 'extends =', extends, +@@ -365,7 +365,7 @@ class OutputGenerator: + # More logic needed! + self.logMsg('diag', 'Enum', name, '-> offset [', numVal, ',', value, ']') + return [numVal, value] +- if 'alias' in elem.keys(): ++ if 'alias' in list(elem.keys()): + return [None, elem.get('alias')] + return [None, None] + +diff --git a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-headers/src/registry/reg.py b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-headers/src/registry/reg.py +index c63804b11..e74939bd5 100755 +--- a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-headers/src/registry/reg.py ++++ b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-headers/src/registry/reg.py +@@ -112,7 +112,7 @@ class BaseInfo: + If 'required' is not True, also returns True if neither element + has an attribute value for key.""" + +- if required and key not in self.elem.keys(): ++ if required and key not in list(self.elem.keys()): + return False + return self.elem.get(key) == info.elem.get(key) + +@@ -1186,7 +1186,7 @@ class Registry: + # being generated. 
Add extensions matching the pattern specified in + # regExtensions, then remove extensions matching the pattern + # specified in regRemoveExtensions +- for (extName, ei) in sorted(self.extdict.items(), key=lambda x: x[1].number if x[1].number is not None else '0'): ++ for (extName, ei) in sorted(list(self.extdict.items()), key=lambda x: x[1].number if x[1].number is not None else '0'): + extName = ei.name + include = False + +diff --git a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-loader/src/scripts/loader_genvk.py b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-loader/src/scripts/loader_genvk.py +index 7c8185880..638709a22 100644 +--- a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-loader/src/scripts/loader_genvk.py ++++ b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-loader/src/scripts/loader_genvk.py +@@ -256,7 +256,7 @@ def genTarget(args): + makeGenOpts(args) + + # Select a generator matching the requested target +- if (args.target in genOpts.keys()): ++ if (args.target in list(genOpts.keys())): + createGenerator = genOpts[args.target][0] + options = genOpts[args.target][1] + +@@ -277,7 +277,7 @@ def genTarget(args): + return (gen, options) + else: + write('No generator options for unknown target:', args.target, file=sys.stderr) +- return none ++ return None + + # -feature name + # -extension name +@@ -357,7 +357,7 @@ if __name__ == '__main__': + from dispatch_table_helper_generator import DispatchTableHelperOutputGenerator, DispatchTableHelperOutputGeneratorOptions + from helper_file_generator import HelperFileOutputGenerator, HelperFileOutputGeneratorOptions + from loader_extension_generator import LoaderExtensionOutputGenerator, LoaderExtensionGeneratorOptions +- # Temporary workaround for vkconventions python2 compatibility ++ # Temporary workaround for vkconventions python3 compatibility + import abc; abc.ABC = abc.ABCMeta('ABC', (object,), {}) + from vkconventions import VulkanConventions + +diff 
--git a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-loader/src/scripts/update_deps.py b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-loader/src/scripts/update_deps.py +index ea21c9fba..e82dcc877 100755 +--- a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-loader/src/scripts/update_deps.py ++++ b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-loader/src/scripts/update_deps.py +@@ -238,7 +238,7 @@ option can be a relative or absolute path. + + """ + +-from __future__ import print_function ++ + + import argparse + import json +diff --git a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/build-gn/generate_vulkan_layers_json.py b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/build-gn/generate_vulkan_layers_json.py +index 93d105b62..27f2294cf 100755 +--- a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/build-gn/generate_vulkan_layers_json.py ++++ b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/build-gn/generate_vulkan_layers_json.py +@@ -17,7 +17,7 @@ + """Generate copies of the Vulkan layers JSON files, with no paths, forcing + Vulkan to use the default search path to look for layers.""" + +-from __future__ import print_function ++ + + import argparse + import glob +diff --git a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/generate_source.py b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/generate_source.py +index a2002aae1..8e93f6364 100755 +--- a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/generate_source.py ++++ b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/generate_source.py +@@ -53,11 +53,11 @@ def main(argv): + # generate in temp directory so we can compare or copy later + temp_obj = tempfile.TemporaryDirectory(prefix='VulkanLoader_generated_source_') + temp_dir = temp_obj.name +- for path in 
files_to_gen.keys(): ++ for path in list(files_to_gen.keys()): + os.makedirs(os.path.join(temp_dir, path)) + + # run each code generator +- for path, filenames in files_to_gen.items(): ++ for path, filenames in list(files_to_gen.items()): + for filename in filenames: + if args.verify or args.incremental: + output_path = os.path.join(temp_dir, path) +@@ -82,17 +82,17 @@ def main(argv): + if args.verify: + # compare contents of temp dir and repo + temp_files = {} +- for path in files_to_gen.keys(): ++ for path in list(files_to_gen.keys()): + temp_files[path] = set() + temp_files[path].update(set(os.listdir(os.path.join(temp_dir, path)))) + + repo_files = {} +- for path in files_to_gen.keys(): ++ for path in list(files_to_gen.keys()): + repo_files[path] = set() + repo_files[path].update(set(os.listdir(os.path.join(repo_dir, path))) - set(verify_exclude)) + + files_match = True +- for path in files_to_gen.keys(): ++ for path in list(files_to_gen.keys()): + for filename in sorted((temp_files[path] | repo_files[path])): + if filename not in repo_files[path]: + print('ERROR: Missing repo file', filename) +@@ -114,7 +114,7 @@ def main(argv): + + elif args.incremental: + # copy missing or differing files from temp directory to repo +- for path in files_to_gen.keys(): ++ for path in list(files_to_gen.keys()): + for filename in os.listdir(os.path.join(temp_dir,path)): + temp_filename = os.path.join(temp_dir, path, filename) + repo_filename = os.path.join(repo_dir, path, filename) +diff --git a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/kvt_genvk.py b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/kvt_genvk.py +index 8cd3e1beb..3a9f5aa55 100644 +--- a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/kvt_genvk.py ++++ b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/kvt_genvk.py +@@ -248,7 +248,7 @@ def genTarget(args): + # Create generator options with 
specified parameters + makeGenOpts(args) + +- if (args.target in genOpts.keys()): ++ if (args.target in list(genOpts.keys())): + createGenerator = genOpts[args.target][0] + options = genOpts[args.target][1] + +@@ -351,7 +351,7 @@ if __name__ == '__main__': + from mock_icd_generator import MockICDGeneratorOptions, MockICDOutputGenerator + from vulkan_tools_helper_file_generator import HelperFileOutputGenerator, HelperFileOutputGeneratorOptions + from vulkaninfo_generator import VulkanInfoGenerator, VulkanInfoGeneratorOptions +- # Temporary workaround for vkconventions python2 compatibility ++ # Temporary workaround for vkconventions python3 compatibility + import abc + abc.ABC = abc.ABCMeta('ABC', (object,), {}) + from vkconventions import VulkanConventions +diff --git a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/mock_icd_generator.py b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/mock_icd_generator.py +index 30f396c3c..e8281be45 100644 +--- a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/mock_icd_generator.py ++++ b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/mock_icd_generator.py +@@ -1259,7 +1259,7 @@ class MockICDOutputGenerator(OutputGenerator): + self.newline() + #write('// endFeature looking at self.sections[command]', file=self.outFile) + if (self.sections['command']): +- write('\n'.join(self.sections['command']), end=u'', file=self.outFile) ++ write('\n'.join(self.sections['command']), end='', file=self.outFile) + self.newline() + if (self.featureExtraProtect != None): + write('#endif /*', self.featureExtraProtect, '*/', file=self.outFile) +diff --git a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/update_deps.py b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/update_deps.py +index ea21c9fba..e82dcc877 100755 +--- 
a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/update_deps.py ++++ b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/update_deps.py +@@ -238,7 +238,7 @@ option can be a relative or absolute path. + + """ + +-from __future__ import print_function ++ + + import argparse + import json +diff --git a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/vulkaninfo_generator.py b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/vulkaninfo_generator.py +index 61e8651be..10ae74b33 100644 +--- a/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/vulkaninfo_generator.py ++++ b/src/3rdparty/chromium/third_party/angle/third_party/vulkan-tools/src/scripts/vulkaninfo_generator.py +@@ -186,7 +186,7 @@ class VulkanInfoGenerator(OutputGenerator): + self.types_to_gen = set() + + self.extension_sets = OrderedDict() +- for ext_cat in EXTENSION_CATEGORIES.keys(): ++ for ext_cat in list(EXTENSION_CATEGORIES.keys()): + self.extension_sets[ext_cat] = set() + + self.enums = [] +@@ -235,7 +235,7 @@ class VulkanInfoGenerator(OutputGenerator): + + types_to_gen.update( + GatherTypesToGen(self.all_structures, structures_to_gen)) +- for key in EXTENSION_CATEGORIES.keys(): ++ for key in list(EXTENSION_CATEGORIES.keys()): + types_to_gen.update( + GatherTypesToGen(self.all_structures, self.extension_sets[key])) + types_to_gen = sorted(types_to_gen) +@@ -252,12 +252,12 @@ class VulkanInfoGenerator(OutputGenerator): + structs_to_comp.update( + GatherTypesToGen(self.all_structures, struct_comparisons_to_gen)) + +- for key, value in self.extension_sets.items(): ++ for key, value in list(self.extension_sets.items()): + self.extension_sets[key] = sorted(value) + + alias_versions = OrderedDict() + for version in self.vulkan_versions: +- for aliased_type, aliases in self.aliases.items(): ++ for aliased_type, aliases in list(self.aliases.items()): + for alias in aliases: 
+ if alias in version.names: + alias_versions[alias] = version.minorVersion +@@ -292,12 +292,12 @@ class VulkanInfoGenerator(OutputGenerator): + + out += "pNextChainInfos get_chain_infos() {\n" + out += " pNextChainInfos infos;\n" +- for key in EXTENSION_CATEGORIES.keys(): ++ for key in list(EXTENSION_CATEGORIES.keys()): + out += PrintChainBuilders(key, + self.extension_sets[key], self.all_structures) + out += " return infos;\n}\n" + +- for key, value in EXTENSION_CATEGORIES.items(): ++ for key, value in list(EXTENSION_CATEGORIES.items()): + out += PrintChainIterator(key, + self.extension_sets[key], self.all_structures, value.get('type'), self.extTypes, self.aliases, self.vulkan_versions) + +@@ -324,7 +324,7 @@ class VulkanInfoGenerator(OutputGenerator): + gen.OutputGenerator.genGroup(self, groupinfo, groupName, alias) + + if alias is not None: +- if alias in self.aliases.keys(): ++ if alias in list(self.aliases.keys()): + self.aliases[alias].append(groupName) + else: + self.aliases[alias] = [groupName, ] +@@ -339,7 +339,7 @@ class VulkanInfoGenerator(OutputGenerator): + gen.OutputGenerator.genType(self, typeinfo, name, alias) + + if alias is not None: +- if alias in self.aliases.keys(): ++ if alias in list(self.aliases.keys()): + self.aliases[alias].append(name) + else: + self.aliases[alias] = [name, ] +@@ -358,7 +358,7 @@ class VulkanInfoGenerator(OutputGenerator): + if(node.get('values').find(vendor)) != -1: + return + +- for key, value in EXTENSION_CATEGORIES.items(): ++ for key, value in list(EXTENSION_CATEGORIES.items()): + if typeinfo.elem.get('structextends') == value.get('extends'): + self.extension_sets[key].add(name) + +@@ -660,8 +660,8 @@ def PrintChainIterator(listName, structures, all_structures, checkExtLoc, extTyp + + extNameStr = None + extType = None +- for k, e in extTypes.items(): +- if k == s.name or (s.name in aliases.keys() and k in aliases[s.name]): ++ for k, e in list(extTypes.items()): ++ if k == s.name or (s.name in list(aliases.keys()) 
and k in aliases[s.name]): + if e.extNameStr is not None: + extNameStr = e.extNameStr + if e.type is not None: +@@ -672,7 +672,7 @@ def PrintChainIterator(listName, structures, all_structures, checkExtLoc, extTyp + for v in versions: + if s.name in v.names: + version = v.minorVersion +- if s.name in aliases.keys(): ++ if s.name in list(aliases.keys()): + for alias in aliases[s.name]: + oldVersionName = alias + +@@ -680,7 +680,7 @@ def PrintChainIterator(listName, structures, all_structures, checkExtLoc, extTyp + out += AddGuardHeader(s) + out += " if (structure->sType == " + s.sTypeName + has_version = version is not None +- has_extNameStr = extNameStr is not None or s.name in aliases.keys() ++ has_extNameStr = extNameStr is not None or s.name in list(aliases.keys()) + + if has_version or has_extNameStr: + out += " && \n (" +@@ -700,7 +700,7 @@ def PrintChainIterator(listName, structures, all_structures, checkExtLoc, extTyp + "("+s.name+"*)structure;\n" + + out += " Dump" + s.name + "(p, " +- if s.name in aliases.keys() and version is not None: ++ if s.name in list(aliases.keys()) and version is not None: + out += "version.minor >= " + version + " ?\"" + \ + s.name + "\":\"" + oldVersionName + "\"" + else: +@@ -787,11 +787,11 @@ class VulkanEnum: + extBase = 1000000000 + extBlockSize = 1000 + childValue = extBase + (extNum - 1) * extBlockSize + extOffset +- if ('dir' in child.keys()): ++ if ('dir' in list(child.keys())): + childValue = -childValue + duplicate = False + for o in self.options: +- if o.values()['optName'] == childName: ++ if list(o.values())['optName'] == childName: + duplicate = True + if duplicate: + continue +@@ -912,7 +912,7 @@ class VulkanStructure: + self.members.append(VulkanVariable( + node, constants, self.name)) + +- for k, e in extTypes.items(): ++ for k, e in list(extTypes.items()): + if k == self.name: + if e.guard is not None: + self.guard = e.guard +diff --git a/src/3rdparty/chromium/third_party/angle/tools/angle_tools.py 
b/src/3rdparty/chromium/third_party/angle/tools/angle_tools.py +index f4d14c4ca..857d0b282 100644 +--- a/src/3rdparty/chromium/third_party/angle/tools/angle_tools.py ++++ b/src/3rdparty/chromium/third_party/angle/tools/angle_tools.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2019 The ANGLE Project Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +diff --git a/src/3rdparty/chromium/third_party/angle/tools/flex-bison/update_flex_bison_binaries.py b/src/3rdparty/chromium/third_party/angle/tools/flex-bison/update_flex_bison_binaries.py +index 99c002b04..dd95d473b 100755 +--- a/src/3rdparty/chromium/third_party/angle/tools/flex-bison/update_flex_bison_binaries.py ++++ b/src/3rdparty/chromium/third_party/angle/tools/flex-bison/update_flex_bison_binaries.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2019 The ANGLE Project Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be +diff --git a/src/3rdparty/chromium/third_party/angle/tools/glslang/update_glslang_binary.py b/src/3rdparty/chromium/third_party/angle/tools/glslang/update_glslang_binary.py +index 1e2bb5256..99cb2a0cb 100755 +--- a/src/3rdparty/chromium/third_party/angle/tools/glslang/update_glslang_binary.py ++++ b/src/3rdparty/chromium/third_party/angle/tools/glslang/update_glslang_binary.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/python2 ++#!/usr/bin/python3 + # + # Copyright 2019 The ANGLE Project Authors. All rights reserved. 
+ # Use of this source code is governed by a BSD-style license that can be +diff --git a/src/3rdparty/chromium/third_party/blink/PRESUBMIT_test.py b/src/3rdparty/chromium/third_party/blink/PRESUBMIT_test.py +index bd74f2cf6..fc50ffa00 100755 +--- a/src/3rdparty/chromium/third_party/blink/PRESUBMIT_test.py ++++ b/src/3rdparty/chromium/third_party/blink/PRESUBMIT_test.py +@@ -113,7 +113,7 @@ class PresubmitTest(unittest.TestCase): + # pylint: disable=W0212 + errors = PRESUBMIT._CheckForWrongMojomIncludes(mock_input_api, + MockOutputApi()) +- self.assertEquals( ++ self.assertEqual( + 'Public blink headers using Blink variant mojoms found. ' + + 'You must include .mojom-forward.h or .mojom-shared.h instead:', + errors[0].message) +@@ -137,7 +137,7 @@ class PresubmitTest(unittest.TestCase): + # pylint: disable=W0212 + errors = PRESUBMIT._CheckForWrongMojomIncludes(mock_input_api, + MockOutputApi()) +- self.assertEquals([], errors) ++ self.assertEqual([], errors) + + + class CxxDependencyTest(unittest.TestCase): +@@ -201,8 +201,8 @@ class CxxDependencyTest(unittest.TestCase): + + for item in self.disallow_list: + errors = self.runCheck(filename, ['%s' % item]) +- self.assertEquals(1, len(errors)) +- self.assertRegexpMatches( ++ self.assertEqual(1, len(errors)) ++ self.assertRegex( + errors[0].message, + r'^[^:]+:\d+ uses disallowed identifier .+$') + +@@ -214,8 +214,8 @@ class CxxDependencyTest(unittest.TestCase): + + for item in self.disallow_list: + errors = self.runCheck(filename, ['%s' % item]) +- self.assertEquals(1, len(errors)) +- self.assertRegexpMatches( ++ self.assertEqual(1, len(errors)) ++ self.assertRegex( + errors[0].message, + r'^[^:]+:\d+ uses disallowed identifier .+$') + +@@ -227,8 +227,8 @@ class CxxDependencyTest(unittest.TestCase): + + for item in self.disallow_list: + errors = self.runCheck(filename, ['%s' % item]) +- self.assertEquals(1, len(errors)) +- self.assertRegexpMatches( ++ self.assertEqual(1, len(errors)) ++ self.assertRegex( + 
errors[0].message, + r'^[^:]+:\d+ uses disallowed identifier .+$') + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/aggregate_generated_bindings.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/aggregate_generated_bindings.py +index 4a4cb9f57..e069b78e5 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/aggregate_generated_bindings.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/aggregate_generated_bindings.py +@@ -46,7 +46,7 @@ Usage: + Design doc: http://www.chromium.org/developers/design-documents/idl-build + """ + +-from __future__ import print_function ++ + + import errno + import optparse +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py +index 3225ecca6..fc078d31b 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/blink_v8_bridge.py +@@ -344,7 +344,7 @@ def make_default_value_expr(idl_type, default_value): + """ + assert default_value.is_type_compatible_with(idl_type) + +- class DefaultValueExpr: ++ class DefaultValueExpr(object): + _ALLOWED_SYMBOLS_IN_DEPS = ("isolate") + + def __init__(self, initializer_expr, initializer_deps, +@@ -502,7 +502,7 @@ def make_v8_to_blink_value(blink_var_name, + assert isinstance(blink_var_name, str) + assert isinstance(v8_value_expr, str) + assert isinstance(idl_type, web_idl.IdlType) +- assert (argument_index is None or isinstance(argument_index, (int, long))) ++ assert (argument_index is None or isinstance(argument_index, int)) + assert (default_value is None + or isinstance(default_value, web_idl.LiteralConstant)) + +@@ -622,7 +622,7 @@ def make_v8_to_blink_value_variadic(blink_var_name, v8_array, + """ + assert 
isinstance(blink_var_name, str) + assert isinstance(v8_array, str) +- assert isinstance(v8_array_start_index, (int, long)) ++ assert isinstance(v8_array_start_index, int) + assert isinstance(idl_type, web_idl.IdlType) + + pattern = ("auto&& ${{{_1}}} = " +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/callback_interface.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/callback_interface.py +index 4a6df5130..8b51f23a4 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/callback_interface.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/callback_interface.py +@@ -177,7 +177,7 @@ def generate_callback_interface(callback_interface_identifier): + prop_install_mode=PropInstallMode.UNCONDITIONAL, + trampoline_var_name=None, + attribute_entries=[], +- constant_entries=filter(is_unconditional, constant_entries), ++ constant_entries=list(filter(is_unconditional, constant_entries)), + exposed_construct_entries=[], + operation_entries=[]) + (install_interface_template_decl, install_interface_template_def, +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/code_node.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/code_node.py +index 52972fefe..68e7f52d0 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/code_node.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/code_node.py +@@ -316,7 +316,7 @@ class CodeNode(object): + for node in outers: + if node.own_template_vars is None: + continue +- for name, value in node.own_template_vars.items(): ++ for name, value in list(node.own_template_vars.items()): + assert name not in bindings, ( + "Duplicated template variable binding: {}".format(name)) + bindings[name] = value +@@ -341,7 +341,7 @@ class CodeNode(object): + + def add_template_vars(self, 
template_vars): + assert isinstance(template_vars, dict) +- for name, value in template_vars.items(): ++ for name, value in list(template_vars.items()): + self.add_template_var(name, value) + + @property +@@ -357,7 +357,7 @@ class CodeNode(object): + + def set_base_template_vars(self, template_vars): + assert isinstance(template_vars, dict) +- for name, value in template_vars.items(): ++ for name, value in list(template_vars.items()): + assert isinstance(name, str) + assert not isinstance(value, CodeNode) + assert self._base_template_vars is None +@@ -503,13 +503,13 @@ class CompositeNode(CodeNode): + gensym_kwargs = {} + template_vars = {} + for arg in args: +- assert isinstance(arg, (CodeNode, int, long, str)) ++ assert isinstance(arg, (CodeNode, int, str)) + gensym = CodeNode.gensym() + gensym_args.append("${{{}}}".format(gensym)) + template_vars[gensym] = arg +- for key, value in kwargs.items(): +- assert isinstance(key, (int, long, str)) +- assert isinstance(value, (CodeNode, int, long, str)) ++ for key, value in list(kwargs.items()): ++ assert isinstance(key, (int, str)) ++ assert isinstance(value, (CodeNode, int, str)) + gensym = CodeNode.gensym() + gensym_kwargs[key] = "${{{}}}".format(gensym) + template_vars[gensym] = value +@@ -602,7 +602,7 @@ class ListNode(CodeNode): + def insert(self, index, node): + if node is None: + return +- assert isinstance(index, (int, long)) ++ assert isinstance(index, int) + assert isinstance(node, CodeNode) + assert node.outer is None and node.prev is None + +@@ -721,20 +721,19 @@ class SymbolScopeNode(SequenceNode): + if not scope_chains: + return counts + +- self_index = iter(scope_chains).next().index(self) +- scope_chains = map( +- lambda scope_chain: scope_chain[self_index + 1:], scope_chains) ++ self_index = iter(scope_chains).__next__().index(self) ++ scope_chains = [scope_chain[self_index + 1:] for scope_chain in scope_chains] + scope_to_likeliness = {} + for scope_chain in scope_chains: + if not scope_chain: + 
counts[DIRECT_USES] += 1 + else: + likeliness = min( +- map(lambda scope: scope.likeliness, scope_chain)) ++ [scope.likeliness for scope in scope_chain]) + scope = scope_chain[0] + scope_to_likeliness[scope] = max( + likeliness, scope_to_likeliness.get(scope, likeliness)) +- for likeliness in scope_to_likeliness.values(): ++ for likeliness in list(scope_to_likeliness.values()): + counts[DIRECT_CHILD_SCOPES] += 1 + counts[likeliness] += 1 + return counts +@@ -896,8 +895,7 @@ class SymbolNode(CodeNode): + + def _request_symbol_definition(self, renderer): + symbol_scope_chain = tuple( +- filter(lambda node: isinstance(node, SymbolScopeNode), +- renderer.callers_from_first_to_last)) ++ [node for node in renderer.callers_from_first_to_last if isinstance(node, SymbolScopeNode)]) + + for caller in renderer.callers_from_last_to_first: + caller.on_code_symbol_referenced(self, symbol_scope_chain) +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/code_node_cxx.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/code_node_cxx.py +index 93f246cb4..9646968a2 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/code_node_cxx.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/code_node_cxx.py +@@ -319,8 +319,8 @@ class CxxFuncDeclNode(CompositeNode): + CompositeNode.__init__(self, + template_format, + name=_to_maybe_text_node(name), +- arg_decls=ListNode(map(_to_maybe_text_node, +- arg_decls), ++ arg_decls=ListNode(list(map(_to_maybe_text_node, ++ arg_decls)), + separator=", "), + return_type=_to_maybe_text_node(return_type), + template=template, +@@ -401,7 +401,7 @@ class CxxFuncDefNode(CompositeNode): + member_initializer_list = "" + else: + member_initializer_list = ListNode( +- map(_to_maybe_text_node, member_initializer_list), ++ list(map(_to_maybe_text_node, member_initializer_list)), + separator=", ", + head=" : ") + +@@ -413,7 +413,7 @@ 
class CxxFuncDefNode(CompositeNode): + template_format, + name=_to_maybe_text_node(name), + arg_decls=ListNode( +- map(_to_maybe_text_node, arg_decls), separator=", "), ++ list(map(_to_maybe_text_node, arg_decls)), separator=", "), + return_type=_to_maybe_text_node(return_type), + class_name=class_name, + template=template, +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_accumulator.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_accumulator.py +index 2c822edf4..03a5f7497 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_accumulator.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_accumulator.py +@@ -26,7 +26,7 @@ class CodeGenAccumulator(object): + return self._include_headers + + def add_include_headers(self, headers): +- self._include_headers.update(filter(None, headers)) ++ self._include_headers.update([_f for _f in headers if _f]) + + @staticmethod + def require_include_headers(headers): +@@ -37,7 +37,7 @@ class CodeGenAccumulator(object): + return self._class_decls + + def add_class_decls(self, class_names): +- self._class_decls.update(filter(None, class_names)) ++ self._class_decls.update([_f for _f in class_names if _f]) + + @staticmethod + def require_class_decls(class_names): +@@ -48,7 +48,7 @@ class CodeGenAccumulator(object): + return self._struct_decls + + def add_struct_decls(self, struct_names): +- self._struct_decls.update(filter(None, struct_names)) ++ self._struct_decls.update([_f for _f in struct_names if _f]) + + @staticmethod + def require_struct_decls(struct_names): +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_context.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_context.py +index 9760f9637..82d7bd88f 100644 +--- 
a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_context.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_context.py +@@ -114,7 +114,7 @@ class CodeGenContext(object): + ) + + # Define public readonly properties of this class. +- for attr in cls._context_attrs.keys(): ++ for attr in list(cls._context_attrs.keys()): + + def make_get(): + _attr = cls._internal_attr(attr) +@@ -133,11 +133,11 @@ class CodeGenContext(object): + def __init__(self, **kwargs): + assert CodeGenContext._was_initialized + +- for arg in kwargs.keys(): ++ for arg in list(kwargs.keys()): + assert arg in self._context_attrs, "Unknown argument: {}".format( + arg) + +- for attr, default_value in self._context_attrs.items(): ++ for attr, default_value in list(self._context_attrs.items()): + value = kwargs[attr] if attr in kwargs else default_value + assert (default_value is None + or type(value) is type(default_value)), ( +@@ -149,13 +149,13 @@ class CodeGenContext(object): + Returns a copy of this context applying the updates given as the + arguments. 
+ """ +- for arg in kwargs.keys(): ++ for arg in list(kwargs.keys()): + assert arg in self._context_attrs, "Unknown argument: {}".format( + arg) + + new_object = copy.copy(self) + +- for attr, new_value in kwargs.items(): ++ for attr, new_value in list(kwargs.items()): + old_value = getattr(self, attr) + assert old_value is None or type(new_value) is type(old_value), ( + "Type mismatch at argument: {}".format(attr)) +@@ -172,7 +172,7 @@ class CodeGenContext(object): + """ + bindings = {} + +- for attr in self._context_attrs.keys(): ++ for attr in list(self._context_attrs.keys()): + value = getattr(self, attr) + if value is None: + value = NonRenderable(attr) +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_expr.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_expr.py +index a229a6c71..cbd1fee36 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_expr.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_expr.py +@@ -109,7 +109,7 @@ def expr_and(terms): + + if any(term.is_always_false for term in terms): + return _Expr(False) +- terms = filter(lambda x: not x.is_always_true, terms) ++ terms = [x for x in terms if not x.is_always_true] + if not terms: + return _Expr(True) + if len(terms) == 1: +@@ -124,7 +124,7 @@ def expr_or(terms): + + if any(term.is_always_true for term in terms): + return _Expr(True) +- terms = filter(lambda x: not x.is_always_false, terms) ++ terms = [x for x in terms if not x.is_always_false] + if not terms: + return _Expr(False) + if len(terms) == 1: +@@ -210,9 +210,7 @@ def expr_from_exposure(exposure, + feature, arg)) + + def ref_selected(features): +- feature_tokens = map( +- lambda feature: "OriginTrialFeature::k{}".format(feature), +- features) ++ feature_tokens = ["OriginTrialFeature::k{}".format(feature) for feature in features] + return 
_Expr("${{feature_selector}}.IsAnyOf({})".format( + ", ".join(feature_tokens))) + +@@ -222,7 +220,7 @@ def expr_from_exposure(exposure, + elif exposure.only_in_secure_contexts is False: + secure_context_term = _Expr(True) + else: +- terms = map(ref_enabled, exposure.only_in_secure_contexts) ++ terms = list(map(ref_enabled, exposure.only_in_secure_contexts)) + secure_context_term = expr_or( + [_Expr("${is_in_secure_context}"), + expr_not(expr_and(terms))]) +@@ -269,16 +267,15 @@ def expr_from_exposure(exposure, + # [RuntimeEnabled] + if exposure.runtime_enabled_features: + feature_enabled_terms.extend( +- map(ref_enabled, exposure.runtime_enabled_features)) ++ list(map(ref_enabled, exposure.runtime_enabled_features))) + feature_selector_names.extend( + exposure.context_dependent_runtime_enabled_features) + + # [ContextEnabled] + if exposure.context_enabled_features: +- terms = map( +- lambda feature: _Expr( ++ terms = [_Expr( + "${{context_feature_settings}}->is{}Enabled()".format( +- feature)), exposure.context_enabled_features) ++ feature)) for feature in exposure.context_enabled_features] + context_enabled_terms.append( + expr_and([_Expr("${context_feature_settings}"), + expr_or(terms)])) +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_format.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_format.py +index 87d26eec3..9758e3651 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_format.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_format.py +@@ -23,13 +23,13 @@ class _TemplateFormatter(string.Formatter): + self._template_formatter_indexing_count_ = 0 + + def get_value(self, key, args, kwargs): +- if isinstance(key, (int, long)): ++ if isinstance(key, int): + return args[key] + assert isinstance(key, str) + if not key: + # Prior to Python 3.1, when a positional argument specifier is + # 
omitted, |format_string="{}"| produces |key=""|. Should be +- # removed once Python2 gets retired. ++ # removed now that Python2 is retired. + index = self._template_formatter_indexing_count_ + self._template_formatter_indexing_count_ += 1 + return args[index] +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_utils.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_utils.py +index 2bcc4fed4..e72282aa6 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_utils.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/codegen_utils.py +@@ -116,4 +116,4 @@ def write_code_node_to_file(code_node, filepath): + # stderr=format_result.error_message)) + # + # web_idl.file_io.write_to_file_if_changed(filepath, format_result.contents) +- web_idl.file_io.write_to_file_if_changed(filepath, rendered_text) ++ web_idl.file_io.write_to_file_if_changed(filepath, rendered_text.encode('utf-8')) +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/dictionary.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/dictionary.py +index b39f01004..4d6820229 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/dictionary.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/dictionary.py +@@ -993,7 +993,7 @@ def make_dict_trace_func(cg_context): + _2 = _blink_member_name(member).value_var + return TextNode(_format(pattern, _1=_1, _2=_2)) + +- body.extend(map(make_trace_member_node, own_members)) ++ body.extend(list(map(make_trace_member_node, own_members))) + body.append(TextNode("BaseClass::Trace(visitor);")) + + return func_decl, func_def +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/interface.py 
b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/interface.py +index 10ff30656..630a4319f 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/interface.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/interface.py +@@ -561,8 +561,7 @@ def _make_reflect_process_keyword_state(cg_context): + body=F("${return_value} = {};", constant(empty_default))) + + expr = " || ".join( +- map(lambda keyword: "reflect_value == {}".format(constant(keyword)), +- keywords)) ++ ["reflect_value == {}".format(constant(keyword)) for keyword in keywords]) + branches.append(cond=expr, body=T("${return_value} = reflect_value;")) + + if invalid_default is not None: +@@ -582,7 +581,7 @@ def _make_blink_api_call(code_node, + overriding_args=None): + assert isinstance(code_node, SymbolScopeNode) + assert isinstance(cg_context, CodeGenContext) +- assert num_of_args is None or isinstance(num_of_args, (int, long)) ++ assert num_of_args is None or isinstance(num_of_args, int) + assert (overriding_args is None + or (isinstance(overriding_args, (list, tuple)) + and all(isinstance(arg, str) for arg in overriding_args))) +@@ -1177,7 +1176,7 @@ def make_overload_dispatcher(cg_context): + + # TODO(yukishiino): Runtime-enabled features should be taken into account + # when calculating the max argument size. 
+- max_arg_size = max(map(args_size, items)) ++ max_arg_size = max(list(map(args_size, items))) + arg_count_def = F("const int arg_count = std::min(${info}.Length(), {});", + max_arg_size) + +@@ -1196,8 +1195,7 @@ def make_overload_dispatcher(cg_context): + did_use_break = did_use_break or can_fail + + conditional = expr_or( +- map(lambda item: expr_from_exposure(item.function_like.exposure), +- items)) ++ [expr_from_exposure(item.function_like.exposure) for item in items]) + if not conditional.is_always_true: + node = CxxUnlikelyIfNode(cond=conditional, body=node) + +@@ -1807,7 +1805,7 @@ EventListener* event_handler = JSEventHandler::CreateOrNull( + def optimize_element_cereactions_reflect(): + has_cereactions = False + has_reflect = False +- for key in ext_attrs.keys(): ++ for key in list(ext_attrs.keys()): + if key == "CEReactions": + has_cereactions = True + elif key == "Reflect": +@@ -4642,7 +4640,7 @@ class _PropEntryConstructorGroup(_PropEntryBase): + def __init__(self, is_context_dependent, exposure_conditional, world, + constructor_group, ctor_callback_name, ctor_func_length): + assert isinstance(ctor_callback_name, str) +- assert isinstance(ctor_func_length, (int, long)) ++ assert isinstance(ctor_func_length, int) + + _PropEntryBase.__init__(self, is_context_dependent, + exposure_conditional, world, constructor_group) +@@ -4670,7 +4668,7 @@ class _PropEntryOperationGroup(_PropEntryBase): + op_func_length, + no_alloc_direct_callback_name=None): + assert isinstance(op_callback_name, str) +- assert isinstance(op_func_length, (int, long)) ++ assert isinstance(op_func_length, int) + + _PropEntryBase.__init__(self, is_context_dependent, + exposure_conditional, world, operation_group) +@@ -4952,7 +4950,7 @@ def make_property_entries_and_callback_defs(cg_context, attribute_entries, + + iterate(collectionlike.attributes, process_attribute) + iterate( +- filter(should_define, collectionlike.operation_groups), ++ list(filter(should_define, 
collectionlike.operation_groups)), + process_operation_group) + + return callback_def_nodes +@@ -4968,8 +4966,8 @@ def _make_install_prototype_object(cg_context): + + unscopables = [] + is_unscopable = lambda member: "Unscopable" in member.extended_attributes +- unscopables.extend(filter(is_unscopable, class_like.attributes)) +- unscopables.extend(filter(is_unscopable, class_like.operations)) ++ unscopables.extend(list(filter(is_unscopable, class_like.attributes))) ++ unscopables.extend(list(filter(is_unscopable, class_like.operations))) + if unscopables: + nodes.extend([ + TextNode("""\ +@@ -5175,13 +5173,10 @@ def make_install_interface_template(cg_context, function_name, class_name, api_c + ]) + + if class_like.identifier == "CSSStyleDeclaration": +- css_properties = filter( +- lambda attr: "CSSProperty" in attr.extended_attributes, +- class_like.attributes) ++ css_properties = [attr for attr in class_like.attributes if "CSSProperty" in attr.extended_attributes] + if css_properties: + prop_name_list = "".join( +- map(lambda attr: "\"{}\", ".format(attr.identifier), +- css_properties)) ++ ["\"{}\", ".format(attr.identifier) for attr in css_properties]) + body.append( + T("""\ + // CSSStyleDeclaration-specific settings +@@ -5530,7 +5525,7 @@ ${instance_object} = ${v8_context}->Global()->GetPrototype().As();\ + TextNode(installer_call_text), + ])) + body.append(EmptyNode()) +- for conditional, entries in conditional_to_entries.items(): ++ for conditional, entries in list(conditional_to_entries.items()): + body.append( + CxxUnlikelyIfNode( + cond=conditional, +@@ -5567,8 +5562,7 @@ ${instance_object} = ${v8_context}->Global()->GetPrototype().As();\ + "V8DOMConfiguration::InstallConstants(${isolate}, " + "${interface_template}, ${prototype_template}, " + "kConstantCallbackTable, base::size(kConstantCallbackTable));") +- constant_callback_entries = filter(lambda entry: entry.const_callback_name, +- constant_entries) ++ constant_callback_entries = [entry for entry in 
constant_entries if entry.const_callback_name] + install_properties(table_name, constant_callback_entries, + _make_constant_callback_registration_table, + installer_call_text) +@@ -5584,8 +5578,7 @@ ${instance_object} = ${v8_context}->Global()->GetPrototype().As();\ + "V8DOMConfiguration::InstallConstants(${isolate}, " + "${interface_template}, ${prototype_template}, " + "kConstantValueTable, base::size(kConstantValueTable));") +- constant_value_entries = filter( +- lambda entry: not entry.const_callback_name, constant_entries) ++ constant_value_entries = [entry for entry in constant_entries if not entry.const_callback_name] + install_properties(table_name, constant_value_entries, + _make_constant_value_registration_table, + installer_call_text) +@@ -5617,12 +5610,10 @@ ${instance_object} = ${v8_context}->Global()->GetPrototype().As();\ + "${instance_template}, ${prototype_template}, " + "${interface_template}, ${signature}, " + "kOperationTable, base::size(kOperationTable));") +- entries = filter(lambda entry: not entry.no_alloc_direct_callback_name, +- operation_entries) ++ entries = [entry for entry in operation_entries if not entry.no_alloc_direct_callback_name] + install_properties(table_name, entries, _make_operation_registration_table, + installer_call_text) +- entries = filter(lambda entry: entry.no_alloc_direct_callback_name, +- operation_entries) ++ entries = [entry for entry in operation_entries if entry.no_alloc_direct_callback_name] + install_properties(table_name, entries, _make_operation_registration_table, + installer_call_text) + +@@ -5661,7 +5652,7 @@ def make_indexed_and_named_property_callbacks_and_install_node(cg_context): + + def most_derived_interface(*interfaces): + key = lambda interface: len(interface.inclusive_inherited_interfaces) +- return sorted(filter(None, interfaces), key=key)[-1] ++ return sorted([_f for _f in interfaces if _f], key=key)[-1] + + cg_context = cg_context.make_copy( + v8_callback_type=CodeGenContext.V8_OTHER_CALLBACK) 
+@@ -5699,7 +5690,7 @@ def make_indexed_and_named_property_callbacks_and_install_node(cg_context): + flags.append("v8::PropertyHandlerFlags::kHasNoSideEffect") + property_handler_flags = ( + "static_cast({})".format(" | ".join( +- map(lambda flag: "int32_t({})".format(flag), flags)))) ++ ["int32_t({})".format(flag) for flag in flags]))) + pattern = """\ + // Named interceptors + {{ +@@ -6336,8 +6327,7 @@ def make_v8_context_snapshot_api(cg_context, component, attribute_entries, + assert isinstance(component, web_idl.Component) + + derived_interfaces = cg_context.interface.deriveds +- derived_names = map(lambda interface: interface.identifier, +- derived_interfaces) ++ derived_names = [interface.identifier for interface in derived_interfaces] + derived_names.append(cg_context.interface.identifier) + if not ("Window" in derived_names or "HTMLDocument" in derived_names): + return None, None +@@ -6411,9 +6401,7 @@ def _make_v8_context_snapshot_get_reference_table_function( + collect_callbacks(named_properties_object_callback_defs) + collect_callbacks(cross_origin_property_callback_defs) + +- entry_nodes = map( +- lambda name: TextNode("reinterpret_cast({}),".format(name)), +- filter(None, callback_names)) ++ entry_nodes = [TextNode("reinterpret_cast({}),".format(name)) for name in [_f for _f in callback_names if _f]] + table_node = ListNode([ + TextNode("using namespace ${class_name}Callbacks;"), + TextNode("static const intptr_t kReferenceTable[] = {"), +@@ -6451,10 +6439,10 @@ def _make_v8_context_snapshot_install_props_per_context_function( + class_name=None, + prop_install_mode=PropInstallMode.V8_CONTEXT_SNAPSHOT, + trampoline_var_name=None, +- attribute_entries=filter(selector, attribute_entries), +- constant_entries=filter(selector, constant_entries), +- exposed_construct_entries=filter(selector, exposed_construct_entries), +- operation_entries=filter(selector, operation_entries)) ++ attribute_entries=list(filter(selector, attribute_entries)), ++ 
constant_entries=list(filter(selector, constant_entries)), ++ exposed_construct_entries=list(filter(selector, exposed_construct_entries)), ++ operation_entries=list(filter(selector, operation_entries))) + + return func_decl, func_def + +@@ -6810,11 +6798,11 @@ def generate_interface(interface_identifier): + class_name=impl_class_name, + prop_install_mode=PropInstallMode.UNCONDITIONAL, + trampoline_var_name=tp_install_unconditional_props, +- attribute_entries=filter(is_unconditional, attribute_entries), +- constant_entries=filter(is_unconditional, constant_entries), +- exposed_construct_entries=filter(is_unconditional, +- exposed_construct_entries), +- operation_entries=filter(is_unconditional, operation_entries)) ++ attribute_entries=list(filter(is_unconditional, attribute_entries)), ++ constant_entries=list(filter(is_unconditional, constant_entries)), ++ exposed_construct_entries=list(filter(is_unconditional, ++ exposed_construct_entries)), ++ operation_entries=list(filter(is_unconditional, operation_entries))) + (install_context_independent_props_decl, + install_context_independent_props_def, + install_context_independent_props_trampoline) = make_install_properties( +@@ -6823,11 +6811,11 @@ def generate_interface(interface_identifier): + class_name=impl_class_name, + prop_install_mode=PropInstallMode.CONTEXT_INDEPENDENT, + trampoline_var_name=tp_install_context_independent_props, +- attribute_entries=filter(is_context_independent, attribute_entries), +- constant_entries=filter(is_context_independent, constant_entries), +- exposed_construct_entries=filter(is_context_independent, +- exposed_construct_entries), +- operation_entries=filter(is_context_independent, operation_entries)) ++ attribute_entries=list(filter(is_context_independent, attribute_entries)), ++ constant_entries=list(filter(is_context_independent, constant_entries)), ++ exposed_construct_entries=list(filter(is_context_independent, ++ exposed_construct_entries)), ++ 
operation_entries=list(filter(is_context_independent, operation_entries))) + (install_context_dependent_props_decl, install_context_dependent_props_def, + install_context_dependent_props_trampoline) = make_install_properties( + cg_context, +@@ -6835,11 +6823,11 @@ def generate_interface(interface_identifier): + class_name=impl_class_name, + prop_install_mode=PropInstallMode.CONTEXT_DEPENDENT, + trampoline_var_name=tp_install_context_dependent_props, +- attribute_entries=filter(is_context_dependent, attribute_entries), +- constant_entries=filter(is_context_dependent, constant_entries), +- exposed_construct_entries=filter(is_context_dependent, +- exposed_construct_entries), +- operation_entries=filter(is_context_dependent, operation_entries)) ++ attribute_entries=list(filter(is_context_dependent, attribute_entries)), ++ constant_entries=list(filter(is_context_dependent, constant_entries)), ++ exposed_construct_entries=list(filter(is_context_dependent, ++ exposed_construct_entries)), ++ operation_entries=list(filter(is_context_dependent, operation_entries))) + (install_interface_template_decl, install_interface_template_def, + install_interface_template_trampoline) = make_install_interface_template( + cg_context, +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/mako_renderer.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/mako_renderer.py +index b4c705538..bf23b1893 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/mako_renderer.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/mako_renderer.py +@@ -105,7 +105,7 @@ class MakoRenderer(object): + on_error = self._caller_stack_on_error + if (len(current) <= len(on_error) + and all(current[i] == on_error[i] +- for i in xrange(len(current)))): ++ for i in range(len(current)))): + pass # Error happened in a deeper caller. 
+ else: + self._caller_stack_on_error = list(self._caller_stack) +@@ -166,7 +166,7 @@ def _guess_caller_name(caller): + """Returns the best-guessed name of |caller|.""" + try: + # Outer CodeNode may have a binding to the caller. +- for name, value in caller.outer.template_vars.items(): ++ for name, value in list(caller.outer.template_vars.items()): + if value is caller: + return name + try: +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/name_style.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/name_style.py +index 36a858cec..dab23b374 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/name_style.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/name_style.py +@@ -139,8 +139,8 @@ def _format(style_func, format_string, *args, **kwargs): + assert callable(style_func) + assert isinstance(format_string, str) + +- args = map(style_func, map(_tokenize, args)) +- for key, value in kwargs.items(): ++ args = list(map(style_func, list(map(_tokenize, args)))) ++ for key, value in list(kwargs.items()): + kwargs[key] = style_func(_tokenize(value)) + return format_string.format(*args, **kwargs) + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/path_manager.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/path_manager.py +index e95b5b3a9..787f08ecf 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/path_manager.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/path_manager.py +@@ -58,7 +58,7 @@ class PathManager(object): + cls._root_gen_dir = os.path.abspath(root_gen_dir) + cls._component_reldirs = { + component: posixpath.normpath(rel_dir) +- for component, rel_dir in component_reldirs.items() ++ for component, rel_dir in list(component_reldirs.items()) + } + cls._is_initialized = True + 
+diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/style_format.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/style_format.py +index dc3493cc3..017d3d47b 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/style_format.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/style_format.py +@@ -70,8 +70,13 @@ def gn_format(contents, filename=None): + + + def _invoke_format_command(command_line, filename, contents): +- proc = subprocess.Popen( +- command_line, stdin=subprocess.PIPE, stdout=subprocess.PIPE) ++ kwargs = {} ++ if sys.version_info.major != 2: ++ kwargs['encoding'] = 'utf-8' ++ proc = subprocess.Popen(command_line, ++ stdin=subprocess.PIPE, ++ stdout=subprocess.PIPE, ++ **kwargs) + stdout_output, stderr_output = proc.communicate(input=contents) + exit_code = proc.wait() + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/task_queue.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/task_queue.py +index 0d8f4c0f3..2ede0d334 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/task_queue.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/bind_gen/task_queue.py +@@ -2,9 +2,11 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ ++import functools + import multiprocessing + + from .package_initializer import package_initializer ++from functools import reduce + + + class TaskQueue(object): +@@ -76,7 +78,7 @@ class TaskQueue(object): + if not report_progress: + return + +- done_count = reduce( ++ done_count = functools.reduce( + lambda count, worker_task: count + bool(worker_task.ready()), + self._worker_tasks, 0) + report_progress(len(self._worker_tasks), done_count) +@@ -85,4 +87,4 @@ class TaskQueue(object): + def _task_queue_run_tasks(tasks): + for task in tasks: + func, args, kwargs = task +- apply(func, args, kwargs) ++ func(*args, **kwargs) +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/blink_idl_lexer.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/blink_idl_lexer.py +index d041f1dc2..183712824 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/blink_idl_lexer.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/blink_idl_lexer.py +@@ -53,7 +53,7 @@ http://www.chromium.org/developers/design-documents/idl-compiler#TOC-Front-end + # Disable attribute validation, as lint can't import parent class to check + # pylint: disable=E1101 + +-from __future__ import print_function ++ + + import os.path + import sys +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/blink_idl_parser.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/blink_idl_parser.py +index 0361deed5..3c4b0acbe 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/blink_idl_parser.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/blink_idl_parser.py +@@ -54,7 +54,7 @@ http://www.chromium.org/developers/design-documents/idl-compiler#TOC-Front-end + # pylint: disable=E1101 + # + +-from __future__ import print_function ++ + + import os.path + import sys +diff --git 
a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/code_generator.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/code_generator.py +index e8280be72..826928594 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/code_generator.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/code_generator.py +@@ -5,7 +5,7 @@ + # pylint: disable=import-error,print-statement,relative-import + """Plumbing for a Jinja-based code generator, including CodeGeneratorBase, a base class for all generators.""" + +-from __future__ import print_function ++ + + import os + import posixpath +@@ -13,6 +13,7 @@ import re + import sys + + from idl_types import set_ancestors, IdlType ++from itertools import groupby + from v8_globals import includes + from v8_interface import constant_filters + from v8_types import set_component_dirs +@@ -43,6 +44,7 @@ TEMPLATES_DIR = os.path.normpath( + # after path[0] == invoking script dir + sys.path.insert(1, THIRD_PARTY_DIR) + import jinja2 ++from jinja2.filters import make_attrgetter, environmentfilter + + + def generate_indented_conditional(code, conditional): +@@ -88,6 +90,13 @@ def runtime_enabled_if(code, name): + return generate_indented_conditional(code, function) + + ++@environmentfilter ++def do_stringify_key_group_by(environment, value, attribute): ++ expr = make_attrgetter(environment, attribute) ++ key = lambda item: '' if expr(item) is None else str(expr(item)) ++ return groupby(sorted(value, key=key), expr) ++ ++ + def initialize_jinja_env(cache_dir): + jinja_env = jinja2.Environment( + loader=jinja2.FileSystemLoader(TEMPLATES_DIR), +@@ -113,7 +122,9 @@ def initialize_jinja_env(cache_dir): + 'runtime_enabled_function': + v8_utilities.runtime_enabled_function, + 'secure_context': +- secure_context_if ++ secure_context_if, ++ 'stringifykeygroupby': ++ do_stringify_key_group_by + }) + jinja_env.filters.update(constant_filters()) + 
jinja_env.filters.update(method_filters()) +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/code_generator_v8.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/code_generator_v8.py +index f296d486a..ebb0a4435 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/code_generator_v8.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/code_generator_v8.py +@@ -95,7 +95,7 @@ class TypedefResolver(Visitor): + def resolve(self, definitions, definition_name): + """Traverse definitions and resolves typedefs with the actual types.""" + self.typedefs = {} +- for name, typedef in self.info_provider.typedefs.items(): ++ for name, typedef in list(self.info_provider.typedefs.items()): + self.typedefs[name] = typedef.idl_type + self.additional_header_includes = set() + definitions.accept(self) +@@ -338,7 +338,7 @@ class CodeGeneratorUnionType(CodeGeneratorBase): + # idl_definitions.py. What we do instead is to resolve typedefs in + # _generate_container_code() whenever a new union file is generated. 
+ self.typedefs = {} +- for name, typedef in self.info_provider.typedefs.items(): ++ for name, typedef in list(self.info_provider.typedefs.items()): + self.typedefs[name] = typedef.idl_type + + def _generate_container_code(self, union_type): +@@ -441,7 +441,7 @@ class CodeGeneratorCallbackFunction(CodeGeneratorBase): + if not callback_functions: + return () + outputs = set() +- for callback_function_dict in callback_functions.values(): ++ for callback_function_dict in list(callback_functions.values()): + if callback_function_dict['component_dir'] != self.target_component: + continue + callback_function = callback_function_dict['callback_function'] +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/compute_global_objects.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/compute_global_objects.py +index 7fa1c022a..297f74459 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/compute_global_objects.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/compute_global_objects.py +@@ -48,7 +48,7 @@ def parse_options(): + + + def dict_union(dicts): +- return dict((k, v) for d in dicts for k, v in d.items()) ++ return dict((k, v) for d in dicts for k, v in list(d.items())) + + + def idl_file_to_global_names(idl_filename): +@@ -71,7 +71,7 @@ def idl_file_to_global_names(idl_filename): + raise ValueError( + '[Global] must take an indentifier or an identifier list.\n' + + full_path) +- return map(str.strip, global_value.strip('()').split(',')) ++ return list(map(str.strip, global_value.strip('()').split(','))) + + + def idl_files_to_interface_name_global_names(idl_files): +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/compute_interfaces_info_individual.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/compute_interfaces_info_individual.py +index 5c62edfd0..d1e1fd04b 100755 +--- 
a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/compute_interfaces_info_individual.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/compute_interfaces_info_individual.py +@@ -247,7 +247,7 @@ class InterfaceInfoCollector(object): + self.union_types.update(this_union_types) + self.typedefs.update(definitions.typedefs) + for callback_function_name, callback_function in \ +- definitions.callback_functions.items(): ++ list(definitions.callback_functions.items()): + # Set 'component_dir' to specify a directory that callback function files belong to + self.callback_functions[callback_function_name] = { + 'callback_function': callback_function, +@@ -255,14 +255,14 @@ class InterfaceInfoCollector(object): + 'full_path': os.path.realpath(idl_filename), + } + # Check enum duplication. +- for enum in definitions.enumerations.values(): ++ for enum in list(definitions.enumerations.values()): + if not self.check_enum_consistency(enum): + raise Exception('Enumeration "%s" is defined more than once ' + 'with different valid values' % enum.name) + self.enumerations.update(definitions.enumerations) + + if definitions.interfaces: +- definition = next(iter(definitions.interfaces.values())) ++ definition = next(iter(list(definitions.interfaces.values()))) + interface_info = { + 'is_callback_interface': + definition.is_callback, +@@ -279,7 +279,7 @@ class InterfaceInfoCollector(object): + get_put_forward_interfaces_from_definition(definition), + } + elif definitions.dictionaries: +- definition = next(iter(definitions.dictionaries.values())) ++ definition = next(iter(list(definitions.dictionaries.values()))) + interface_info = { + 'is_callback_interface': False, + 'is_dictionary': True, +@@ -379,7 +379,7 @@ class InterfaceInfoCollector(object): + self.callback_functions, + 'enumerations': + dict((enum.name, enum.values) +- for enum in self.enumerations.values()), ++ for enum in list(self.enumerations.values())), + 'runtime_enabled_features': 
+ runtime_enabled_features, + 'typedefs': +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/compute_interfaces_info_overall.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/compute_interfaces_info_overall.py +index 235cf0f91..0cedd0078 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/compute_interfaces_info_overall.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/compute_interfaces_info_overall.py +@@ -127,12 +127,12 @@ def dict_of_dicts_of_lists_update_or_append(existing, other): + + Needed for merging partial_interface_files across components. + """ +- for key, value in other.items(): ++ for key, value in list(other.items()): + if key not in existing: + existing[key] = value + continue + existing_value = existing[key] +- for inner_key, inner_value in value.items(): ++ for inner_key, inner_value in list(value.items()): + existing_value[inner_key].extend(inner_value) + + +@@ -178,7 +178,7 @@ def compute_global_type_info(): + garbage_collected_interfaces = set() + callback_interfaces = set() + +- for interface_name, interface_info in interfaces_info.items(): ++ for interface_name, interface_info in list(interfaces_info.items()): + component_dirs[interface_name] = idl_filename_to_component( + interface_info['full_path']) + +@@ -220,10 +220,10 @@ def compute_interfaces_info_overall(info_individuals): + partial_interface_files, info['partial_interface_files']) + + # Record inheritance information individually +- for interface_name, interface_info in interfaces_info.items(): ++ for interface_name, interface_info in list(interfaces_info.items()): + extended_attributes = interface_info['extended_attributes'] + inherited_extended_attributes_by_interface[interface_name] = dict( +- (key, value) for key, value in extended_attributes.items() ++ (key, value) for key, value in list(extended_attributes.items()) + if key in INHERITED_EXTENDED_ATTRIBUTES) + parent = 
interface_info['parent'] + if parent: +@@ -241,7 +241,7 @@ def compute_interfaces_info_overall(info_individuals): + # 'includes'). + # Note that moving an 'includes' statement between files does not change the + # info itself (or hence cause a rebuild)! +- for mixin_name, interface_info in interfaces_info.items(): ++ for mixin_name, interface_info in list(interfaces_info.items()): + for interface_name in interface_info['included_by_interfaces']: + interfaces_info[interface_name]['including_mixins'].append( + mixin_name) +@@ -249,7 +249,7 @@ def compute_interfaces_info_overall(info_individuals): + + # An IDL file's dependencies are partial interface files that extend it, + # and files for other interfaces that this interfaces include. +- for interface_name, interface_info in interfaces_info.items(): ++ for interface_name, interface_info in list(interfaces_info.items()): + partial_interface_paths = partial_interface_files[interface_name] + partial_interfaces_full_paths = partial_interface_paths['full_paths'] + # Partial interface definitions each need an include, as they are +@@ -311,7 +311,7 @@ def compute_interfaces_info_overall(info_individuals): + }) + + # Clean up temporary private information +- for interface_info in interfaces_info.values(): ++ for interface_info in list(interfaces_info.values()): + del interface_info['extended_attributes'] + del interface_info['union_types'] + del interface_info['is_legacy_treat_as_partial_interface'] +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_global_constructors.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_global_constructors.py +index 87e6cbb89..badbed1d2 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_global_constructors.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_global_constructors.py +@@ -108,8 +108,8 @@ def record_global_constructors(idl_filename): + elif 
'Exposed' in extended_attributes: + # Exposed=env or Exposed=(env1,...) case + exposed_value = extended_attributes.get('Exposed') +- exposed_global_names = map(str.strip, +- exposed_value.strip('()').split(',')) ++ exposed_global_names = list(map(str.strip, ++ exposed_value.strip('()').split(','))) + new_constructors_list = generate_global_constructors_list( + interface_name, extended_attributes) + for name in exposed_global_names: +@@ -196,7 +196,7 @@ def main(): + record_global_constructors(idl_filename) + + # Check for [Exposed] / [Global] mismatch. +- known_global_names = EXPOSED_EXECUTION_CONTEXT_METHOD.keys() ++ known_global_names = list(EXPOSED_EXECUTION_CONTEXT_METHOD.keys()) + exposed_global_names = frozenset(global_name_to_constructors) + if not exposed_global_names.issubset(known_global_names): + unknown_global_names = exposed_global_names.difference( +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_init_partial_interfaces.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_init_partial_interfaces.py +index fca4d565c..c6ebd4cb8 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_init_partial_interfaces.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_init_partial_interfaces.py +@@ -8,7 +8,7 @@ interfaces in modules to core interfaces. 
+ + # pylint: disable=relative-import + +-from __future__ import print_function ++ + + from optparse import OptionParser + import os +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_origin_trial_features.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_origin_trial_features.py +index 130004eae..6f5bfd231 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_origin_trial_features.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_origin_trial_features.py +@@ -80,7 +80,7 @@ def read_idl_file(reader, idl_filename): + assert len(interfaces) == 1, ( + "Expected one interface in file %r, found %d" % + (idl_filename, len(interfaces))) +- return (interfaces.values()[0], includes) ++ return (list(interfaces.values())[0], includes) + + + def interface_is_global(interface): +@@ -209,7 +209,7 @@ def origin_trial_features_context(generator_name, feature_info): + interface_info.v8_class, + 'installers': + get_install_functions([interface_info], feature_names) +- } for interface_info, feature_names in features_for_type.items()] ++ } for interface_info, feature_names in list(features_for_type.items())] + context['installers_by_interface'].sort(key=lambda x: x['name']) + + # For each conditional feature, collect a list of bindings installation +@@ -221,7 +221,7 @@ def origin_trial_features_context(generator_name, feature_info): + 'OriginTrialFeature::k%s' % feature_name, + 'installers': + get_install_functions(interfaces, [feature_name]) +- } for feature_name, interfaces in types_for_feature.items()] ++ } for feature_name, interfaces in list(types_for_feature.items())] + context['installers_by_feature'].sort(key=lambda x: x['name']) + + return context +@@ -281,7 +281,7 @@ def main(): + + info_provider = create_component_info_provider( + os.path.normpath(options.info_dir), options.target_component) +- idl_filenames = map(str.strip, 
open(options.idl_files_list)) ++ idl_filenames = list(map(str.strip, open(options.idl_files_list))) + + generate_origin_trial_features(info_provider, options, idl_filenames) + return 0 +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_v8_context_snapshot_external_references.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_v8_context_snapshot_external_references.py +index 4f23b299d..aca641a52 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_v8_context_snapshot_external_references.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/generate_v8_context_snapshot_external_references.py +@@ -191,7 +191,7 @@ class ExternalReferenceTableGenerator(object): + target_definitions = definitions[component] + interfaces = target_definitions.interfaces + first_name = target_definitions.first_name +- if first_name in interfaces.keys(): ++ if first_name in list(interfaces.keys()): + interface = interfaces[first_name] + self._process_interface(interface, component, interfaces) + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_compiler.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_compiler.py +index ae06acc48..7ade03e56 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_compiler.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_compiler.py +@@ -83,11 +83,10 @@ def parse_options(): + return options, idl_filename + + +-class IdlCompiler(object): ++class IdlCompiler(object, metaclass=abc.ABCMeta): + """The IDL Compiler. 
+ + """ +- __metaclass__ = abc.ABCMeta + + def __init__(self, + output_directory, +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_definitions.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_definitions.py +index 14e6e9d3f..e44097d2f 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_definitions.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_definitions.py +@@ -77,13 +77,12 @@ SPECIAL_KEYWORD_LIST = ['GETTER', 'SETTER', 'DELETER'] + ################################################################################ + + +-class TypedObject(object): ++class TypedObject(object, metaclass=abc.ABCMeta): + """Object with a type, such as an Attribute or Operation (return value). + + The type can be an actual type, or can be a typedef, which must be resolved + by the TypedefResolver before passing data to the code generator. + """ +- __metaclass__ = abc.ABCMeta + idl_type_attributes = ('idl_type', ) + + +@@ -137,22 +136,22 @@ class IdlDefinitions(object): + + def accept(self, visitor): + visitor.visit_definitions(self) +- for interface in self.interfaces.values(): ++ for interface in list(self.interfaces.values()): + interface.accept(visitor) +- for callback_function in self.callback_functions.values(): ++ for callback_function in list(self.callback_functions.values()): + callback_function.accept(visitor) +- for dictionary in self.dictionaries.values(): ++ for dictionary in list(self.dictionaries.values()): + dictionary.accept(visitor) +- for enumeration in self.enumerations.values(): ++ for enumeration in list(self.enumerations.values()): + enumeration.accept(visitor) + for include in self.includes: + include.accept(visitor) +- for typedef in self.typedefs.values(): ++ for typedef in list(self.typedefs.values()): + typedef.accept(visitor) + + def update(self, other): + """Update with additional IdlDefinitions.""" +- for interface_name, new_interface 
in other.interfaces.items(): ++ for interface_name, new_interface in list(other.interfaces.items()): + if not new_interface.is_partial: + # Add as new interface + self.interfaces[interface_name] = new_interface +@@ -394,7 +393,7 @@ class IdlInterface(object): + else: + raise ValueError('Unrecognized node class: %s' % child_class) + +- if len(filter(None, [self.iterable, self.maplike, self.setlike])) > 1: ++ if len([_f for _f in [self.iterable, self.maplike, self.setlike] if _f]) > 1: + raise ValueError( + 'Interface can only have one of iterable<>, maplike<> and setlike<>.' + ) +@@ -429,8 +428,8 @@ class IdlInterface(object): + extended_attributes = ( + convert_constructor_operations_extended_attributes( + constructor_operations_extended_attributes)) +- if any(name in extended_attributes.keys() +- for name in self.extended_attributes.keys()): ++ if any(name in list(extended_attributes.keys()) ++ for name in list(self.extended_attributes.keys())): + raise ValueError('Detected mixed extended attributes for ' + 'both [Constructor] and constructor ' + 'operations. Do not use both in a single ' +@@ -512,6 +511,9 @@ class IdlAttribute(TypedObject): + def accept(self, visitor): + visitor.visit_attribute(self) + ++ def __lt__(self, other): ++ return self.name < other.name ++ + + ################################################################################ + # Constants +@@ -852,7 +854,7 @@ class IdlIncludes(object): + ################################################################################ + + +-class Exposure: ++class Exposure(object): + """An Exposure holds one Exposed or RuntimeEnabled condition. + Each exposure has two properties: exposed and runtime_enabled. + Exposure(e, r) corresponds to [Exposed(e r)]. 
Exposure(e) corresponds to +@@ -1055,7 +1057,7 @@ def convert_constructor_operations_extended_attributes(extended_attributes): + """ + + converted = {} +- for name, value in extended_attributes.items(): ++ for name, value in list(extended_attributes.items()): + if name == "CallWith": + converted["ConstructorCallWith"] = value + elif name == "RaisesException": +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_reader.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_reader.py +index 8d72865a6..111bcf285 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_reader.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_reader.py +@@ -56,7 +56,7 @@ def validate_blink_idl_definitions(idl_filename, idl_file_basename, + - Otherwise, an IDL file is invalid. + """ + targets = ( +- definitions.interfaces.values() + definitions.dictionaries.values()) ++ list(definitions.interfaces.values()) + list(definitions.dictionaries.values())) + number_of_targets = len(targets) + if number_of_targets > 1: + raise Exception( +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_types.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_types.py +index cd4f0c351..85bcd26ad 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_types.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_types.py +@@ -349,7 +349,7 @@ class IdlUnionType(IdlTypeBase): + return True + + def single_matching_member_type(self, predicate): +- matching_types = filter(predicate, self.flattened_member_types) ++ matching_types = list(filter(predicate, self.flattened_member_types)) + if len(matching_types) > 1: + raise ValueError('%s is ambiguous.' 
% self.name) + return matching_types[0] if matching_types else None +@@ -642,7 +642,7 @@ class IdlAnnotatedType(IdlTypeBase): + def __str__(self): + annotation = ', '.join( + (key + ('' if val is None else '=' + val)) +- for key, val in self.extended_attributes.items()) ++ for key, val in list(self.extended_attributes.items())) + return '[%s] %s' % (annotation, str(self.inner_type)) + + def __getattr__(self, name): +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_validator.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_validator.py +index 64e84ef7d..835e1bf7f 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_validator.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/idl_validator.py +@@ -51,7 +51,7 @@ class IDLExtendedAttributeValidator(object): + + def validate_extended_attributes(self, definitions): + # FIXME: this should be done when parsing the file, rather than after. 
+- for interface in definitions.interfaces.values(): ++ for interface in list(definitions.interfaces.values()): + self.validate_extended_attributes_node(interface) + for attribute in interface.attributes: + self.validate_extended_attributes_node(attribute) +@@ -59,17 +59,17 @@ class IDLExtendedAttributeValidator(object): + self.validate_extended_attributes_node(operation) + for argument in operation.arguments: + self.validate_extended_attributes_node(argument) +- for dictionary in definitions.dictionaries.values(): ++ for dictionary in list(definitions.dictionaries.values()): + self.validate_extended_attributes_node(dictionary) + for member in dictionary.members: + self.validate_extended_attributes_node(member) +- for callback_function in definitions.callback_functions.values(): ++ for callback_function in list(definitions.callback_functions.values()): + self.validate_extended_attributes_node(callback_function) + for argument in callback_function.arguments: + self.validate_extended_attributes_node(argument) + + def validate_extended_attributes_node(self, node): +- for name, values_string in node.extended_attributes.items(): ++ for name, values_string in list(node.extended_attributes.items()): + self.validate_name_values_string(name, values_string) + + def validate_name_values_string(self, name, values_string): +@@ -103,7 +103,7 @@ def read_extended_attributes_file(): + line = line.strip() + if not line or line.startswith('#'): + continue +- name, _, values_string = map(str.strip, line.partition('=')) ++ name, _, values_string = list(map(str.strip, line.partition('='))) + value_list = [ + value.strip() for value in values_string.split('|') + ] +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/interface_dependency_resolver.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/interface_dependency_resolver.py +index 696676ac7..cc41b05ed 100644 +--- 
a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/interface_dependency_resolver.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/interface_dependency_resolver.py +@@ -101,7 +101,7 @@ class InterfaceDependencyResolver(object): + 'this definition: %s, because this should ' + 'have a dictionary.' % definitions.idl_name) + +- target_interface = next(iter(definitions.interfaces.values())) ++ target_interface = next(iter(list(definitions.interfaces.values()))) + interface_name = target_interface.name + interface_info = self.interfaces_info[interface_name] + +@@ -163,7 +163,7 @@ def merge_interface_dependencies(definitions, component, target_interface, + dependency_idl_filename) + + dependency_interface = next( +- iter(dependency_definitions.interfaces.values())) ++ iter(list(dependency_definitions.interfaces.values()))) + + transfer_extended_attributes(dependency_interface, + dependency_idl_filename) +@@ -314,7 +314,7 @@ def transfer_extended_attributes(dependency_interface, + 'ImplementedAs', dependency_interface.name)) + + def update_attributes(attributes, extras): +- for key, value in extras.items(): ++ for key, value in list(extras.items()): + if key not in attributes: + attributes[key] = value + +@@ -362,8 +362,8 @@ def inherit_unforgeable_attributes(resolved_definitions, interfaces_info): + interface.get('cpp_includes', {}).get(component, {})) + return unforgeable_attributes, referenced_interfaces, cpp_includes + +- for component, definitions in resolved_definitions.items(): +- for interface_name, interface in definitions.interfaces.items(): ++ for component, definitions in list(resolved_definitions.items()): ++ for interface_name, interface in list(definitions.interfaces.items()): + interface_info = interfaces_info[interface_name] + inherited_unforgeable_attributes, referenced_interfaces, cpp_includes = \ + collect_unforgeable_attributes_in_ancestors( +diff --git 
a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/overload_set_algorithm.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/overload_set_algorithm.py +index 309de6954..acb8448b6 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/overload_set_algorithm.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/overload_set_algorithm.py +@@ -183,7 +183,7 @@ def method_overloads_by_name(methods): + # Filter to only methods that are actually overloaded + method_counts = Counter(method['name'] for method in methods) + overloaded_method_names = set( +- name for name, count in method_counts.items() if count > 1) ++ name for name, count in list(method_counts.items()) if count > 1) + overloaded_methods = [ + method for method in methods + if method['name'] in overloaded_method_names +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/utilities.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/utilities.py +index e1677ee7b..5584b8a50 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/utilities.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/utilities.py +@@ -13,7 +13,7 @@ import subprocess + import sys + + if sys.version_info.major == 2: +- import cPickle as pickle ++ import pickle as pickle + else: + import pickle + +@@ -196,8 +196,8 @@ class ComponentInfoProviderModules(ComponentInfoProvider): + + @property + def callback_functions(self): +- return dict(self._component_info_core['callback_functions'].items() + +- self._component_info_modules['callback_functions'].items()) ++ return dict(list(self._component_info_core['callback_functions'].items()) + ++ list(self._component_info_modules['callback_functions'].items())) + + @property + def specifier_for_export(self): +@@ -209,8 +209,8 @@ class ComponentInfoProviderModules(ComponentInfoProvider): + + + def 
load_interfaces_info_overall_pickle(info_dir): +- with open(os.path.join(info_dir, +- 'interfaces_info.pickle')) as interface_info_file: ++ with open(os.path.join(info_dir, 'interfaces_info.pickle'), ++ mode='rb') as interface_info_file: + return pickle.load(interface_info_file) + + +@@ -219,7 +219,7 @@ def merge_dict_recursively(target, diff): + |target| will be updated with |diff|. Part of |diff| may be re-used in + |target|. + """ +- for key, value in diff.items(): ++ for key, value in list(diff.items()): + if key not in target: + target[key] = value + elif type(value) == dict: +@@ -236,23 +236,20 @@ def merge_dict_recursively(target, diff): + + def create_component_info_provider_core(info_dir): + interfaces_info = load_interfaces_info_overall_pickle(info_dir) +- with open( +- os.path.join(info_dir, 'core', +- 'component_info_core.pickle')) as component_info_file: ++ with open(os.path.join(info_dir, 'core', 'component_info_core.pickle'), ++ mode='rb') as component_info_file: + component_info = pickle.load(component_info_file) + return ComponentInfoProviderCore(interfaces_info, component_info) + + + def create_component_info_provider_modules(info_dir): + interfaces_info = load_interfaces_info_overall_pickle(info_dir) +- with open( +- os.path.join(info_dir, 'core', +- 'component_info_core.pickle')) as component_info_file: ++ with open(os.path.join(info_dir, 'core', 'component_info_core.pickle'), ++ mode='rb') as component_info_file: + component_info_core = pickle.load(component_info_file) +- with open( +- os.path.join( +- info_dir, 'modules', +- 'component_info_modules.pickle')) as component_info_file: ++ with open(os.path.join(info_dir, 'modules', ++ 'component_info_modules.pickle'), ++ mode='rb') as component_info_file: + component_info_modules = pickle.load(component_info_file) + return ComponentInfoProviderModules(interfaces_info, component_info_core, + component_info_modules) +@@ -356,7 +353,7 @@ def write_pickle_file(pickle_filename, data): + pickle_filename 
= abs(pickle_filename) + # If |data| is same with the file content, we skip updating. + if os.path.isfile(pickle_filename): +- with open(pickle_filename) as pickle_file: ++ with open(pickle_filename, 'rb') as pickle_file: + try: + if pickle.load(pickle_file) == data: + return +@@ -445,7 +442,7 @@ def get_interface_extended_attributes_from_idl(file_contents): + if parences < 0 or square_brackets < 0: + raise ValueError('You have more close braces than open braces.') + if parences == 0 and square_brackets == 0: +- name, _, value = map(str.strip, concatenated.partition('=')) ++ name, _, value = list(map(str.strip, concatenated.partition('='))) + extended_attributes[name] = value + concatenated = None + return extended_attributes +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_attributes.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_attributes.py +index 45aba557f..0f897d1b1 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_attributes.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_attributes.py +@@ -110,9 +110,7 @@ def attribute_context(interface, attribute, interfaces, component_info): + # [ReflectOnly] + reflect_only = extended_attribute_value_as_list(attribute, 'ReflectOnly') + if reflect_only: +- reflect_only = map( +- lambda v: cpp_content_attribute_value_name(interface, v), +- reflect_only) ++ reflect_only = [cpp_content_attribute_value_name(interface, v) for v in reflect_only] + if is_custom_element_callbacks or is_reflect: + includes.add('core/html/custom/v0_custom_element_processing_stack.h') + # [PerWorldBindings] +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_dictionary.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_dictionary.py +index 81b0e6b99..629018c0d 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_dictionary.py ++++ 
b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_dictionary.py +@@ -254,7 +254,7 @@ def dictionary_impl_context(dictionary, interfaces_info): + raise Exception('Member name conflict: %s' % cpp_name) + members_dict[cpp_name] = member + return sorted( +- members_dict.values(), key=lambda member: member['cpp_name']) ++ list(members_dict.values()), key=lambda member: member['cpp_name']) + + includes.clear() + header_forward_decls = set() +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_interface.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_interface.py +index a43260414..a85b03abe 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_interface.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_interface.py +@@ -189,7 +189,7 @@ def context_enabled_features(attributes): + return sorted([ + member for member in members + if member.get(KEY) and not member.get('exposed_test') +- ]) ++ ], key=lambda item: item['name']) + + def member_filter_by_name(members, name): + return [member for member in members if member[KEY] == name] +@@ -612,7 +612,8 @@ def interface_context(interface, interfaces, component_info): + sorted( + origin_trial_features(interface, context['constants'], + context['attributes'], context['methods']) + +- context_enabled_features(context['attributes'])), ++ context_enabled_features(context['attributes']), ++ key=lambda item: item['name']), + }) + if context['optional_features']: + includes.add('platform/bindings/v8_per_context_data.h') +@@ -1356,9 +1357,9 @@ def resolution_tests_methods(effective_overloads): + + # Extract argument and IDL type to simplify accessing these in each loop. 
+ arguments = [method['arguments'][index] for method in methods] +- arguments_methods = zip(arguments, methods) ++ arguments_methods = list(zip(arguments, methods)) + idl_types = [argument['idl_type_object'] for argument in arguments] +- idl_types_methods = zip(idl_types, methods) ++ idl_types_methods = list(zip(idl_types, methods)) + + # We can’t do a single loop through all methods or simply sort them, because + # a method may be listed in multiple steps of the resolution algorithm, and +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_methods.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_methods.py +index 5f1f89a3d..8d5135c9e 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_methods.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_methods.py +@@ -46,6 +46,10 @@ import v8_types + import v8_utilities + from v8_utilities import (has_extended_attribute_value, is_unforgeable) + ++# TODO: Remove this once Python2 is obsoleted. 
++if sys.version_info.major != 2: ++ basestring = str ++ + + def method_is_visible(method, interface_is_partial): + if 'overloads' in method: +@@ -585,7 +589,7 @@ def argument_set_default_value(argument): + return '/* null default value */' + if default_value.value == "{}": + member_type = idl_type.dictionary_member_type +- elif isinstance(default_value.value, basestring): ++ elif isinstance(default_value.value, str): + member_type = idl_type.string_member_type + elif isinstance(default_value.value, (int, float)): + member_type = idl_type.numeric_member_type +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_utilities.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_utilities.py +index 2ecd69233..fcfc48371 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_utilities.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/v8_utilities.py +@@ -271,7 +271,7 @@ EXPOSED_WORKERS = set([ + ]) + + +-class ExposureSet: ++class ExposureSet(object): + """An ExposureSet is a collection of Exposure instructions.""" + + def __init__(self, exposures=None): +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/callback_interface.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/callback_interface.py +index 13fb7c706..8e0106aeb 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/callback_interface.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/callback_interface.py +@@ -93,8 +93,7 @@ class CallbackInterface(UserDefinedType, WithExtendedAttributes, + self._operation_groups = tuple([ + OperationGroup( + operation_group_ir, +- filter(lambda x: x.identifier == operation_group_ir.identifier, +- self._operations), ++ [x for x in self._operations if x.identifier == operation_group_ir.identifier], + owner=self) for operation_group_ir in 
ir.operation_groups + ]) + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/composition_parts.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/composition_parts.py +index 5e8de8940..c45680ba0 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/composition_parts.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/composition_parts.py +@@ -249,7 +249,7 @@ class WithOwnerMixin(object): + def __init__(self, owner_mixin=None): + if isinstance(owner_mixin, WithOwnerMixin): + owner_mixin = owner_mixin._owner_mixin +- # In Python2, we need to avoid circular imports. ++ # In Python3, we need to avoid circular imports. + from .reference import RefById + assert owner_mixin is None or isinstance(owner_mixin, RefById) + +@@ -264,7 +264,7 @@ class WithOwnerMixin(object): + return self._owner_mixin.target_object if self._owner_mixin else None + + def set_owner_mixin(self, mixin): +- # In Python2, we need to avoid circular imports. ++ # In Python3, we need to avoid circular imports. 
+ from .reference import RefById + assert isinstance(mixin, RefById) + assert self._owner_mixin is None +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/database.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/database.py +index c92cf48eb..5b53bed87 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/database.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/database.py +@@ -53,12 +53,12 @@ class DatabaseBody(object): + + def __init__(self): + self._defs = {} +- for kind in DatabaseBody.Kind.values(): ++ for kind in list(DatabaseBody.Kind.values()): + self._defs[kind] = {} + + def register(self, kind, user_defined_type): + assert isinstance(user_defined_type, (Typedef, Union, UserDefinedType)) +- assert kind in DatabaseBody.Kind.values() ++ assert kind in list(DatabaseBody.Kind.values()) + try: + self.find_by_identifier(user_defined_type.identifier) + assert False, user_defined_type.identifier +@@ -67,7 +67,7 @@ class DatabaseBody(object): + self._defs[kind][user_defined_type.identifier] = user_defined_type + + def find_by_identifier(self, identifier): +- for defs_per_kind in self._defs.values(): ++ for defs_per_kind in list(self._defs.values()): + if identifier in defs_per_kind: + return defs_per_kind[identifier] + raise KeyError(identifier) +@@ -156,4 +156,4 @@ class Database(object): + return self._view_by_kind(Database._Kind.UNION) + + def _view_by_kind(self, kind): +- return self._impl.find_by_kind(kind).values() ++ return list(self._impl.find_by_kind(kind).values()) +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/exposure.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/exposure.py +index abaeef39c..e36cf7439 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/exposure.py ++++ 
b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/exposure.py +@@ -8,8 +8,11 @@ from .runtime_enabled_features import RuntimeEnabledFeatures + class _Feature(str): + """Represents a runtime-enabled feature.""" + ++ def __new__(cls, value): ++ return str.__new__(cls, value) ++ + def __init__(self, value): +- str.__init__(self, value) ++ str.__init__(self) + self._is_context_dependent = ( + RuntimeEnabledFeatures.is_context_dependent(self)) + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/extended_attribute.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/extended_attribute.py +index 36eec862f..b988f2882 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/extended_attribute.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/extended_attribute.py +@@ -190,7 +190,7 @@ class ExtendedAttributes(object): + def _on_ext_attrs_updated(self): + self._keys = tuple(sorted(self._ext_attrs.keys())) + self._length = 0 +- for ext_attrs in self._ext_attrs.values(): ++ for ext_attrs in list(self._ext_attrs.values()): + self._length += len(ext_attrs) + + @classmethod +@@ -206,7 +206,7 @@ class ExtendedAttributes(object): + if not all(isinstance(x, cls) for x in (lhs, rhs)): + return False + +- if lhs.keys() != rhs.keys(): ++ if list(lhs.keys()) != list(rhs.keys()): + return False + if len(lhs) != len(rhs): + return False +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/file_io.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/file_io.py +index 19e3327a8..bfa7dbd34 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/file_io.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/file_io.py +@@ -6,7 +6,7 @@ import os + import sys + + if sys.version_info.major == 2: +- import cPickle as pickle # 
'cPickle' is faster than 'pickle' on Py2 ++ import pickle as pickle # 'cPickle' is faster than 'pickle' on Py2 + else: + import pickle + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/function_like.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/function_like.py +index 648c70d80..2ff709fe8 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/function_like.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/function_like.py +@@ -71,8 +71,7 @@ class FunctionLike(WithIdentifier): + def num_of_required_arguments(self): + """Returns the number of required arguments.""" + return len( +- filter(lambda arg: not (arg.is_optional or arg.is_variadic), +- self.arguments)) ++ [arg for arg in self.arguments if not (arg.is_optional or arg.is_variadic)]) + + + class OverloadGroup(WithIdentifier): +@@ -164,7 +163,7 @@ class OverloadGroup(WithIdentifier): + Returns the minimum number of required arguments of overloaded + functions. 
+ """ +- return min(map(lambda func: func.num_of_required_arguments, self)) ++ return min([func.num_of_required_arguments for func in self]) + + def effective_overload_set(self, argument_count=None): + """ +@@ -172,13 +171,13 @@ class OverloadGroup(WithIdentifier): + https://heycam.github.io/webidl/#compute-the-effective-overload-set + """ + assert argument_count is None or isinstance(argument_count, +- (int, long)) ++ int) + + N = argument_count + S = [] + F = self + +- maxarg = max(map(lambda X: len(X.arguments), F)) ++ maxarg = max([len(X.arguments) for X in F]) + if N is None: + arg_sizes = [len(X.arguments) for X in F if not X.is_variadic] + N = 1 + (max(arg_sizes) if arg_sizes else 0) +@@ -188,21 +187,21 @@ class OverloadGroup(WithIdentifier): + + S.append( + OverloadGroup.EffectiveOverloadItem( +- X, map(lambda arg: arg.idl_type, X.arguments), +- map(lambda arg: arg.optionality, X.arguments))) ++ X, [arg.idl_type for arg in X.arguments], ++ [arg.optionality for arg in X.arguments])) + + if X.is_variadic: +- for i in xrange(n, max(maxarg, N)): +- t = map(lambda arg: arg.idl_type, X.arguments) +- o = map(lambda arg: arg.optionality, X.arguments) +- for _ in xrange(n, i + 1): ++ for i in range(n, max(maxarg, N)): ++ t = [arg.idl_type for arg in X.arguments] ++ o = [arg.optionality for arg in X.arguments] ++ for _ in range(n, i + 1): + t.append(X.arguments[-1].idl_type) + o.append(X.arguments[-1].optionality) + S.append(OverloadGroup.EffectiveOverloadItem(X, t, o)) + +- t = map(lambda arg: arg.idl_type, X.arguments) +- o = map(lambda arg: arg.optionality, X.arguments) +- for i in xrange(n - 1, -1, -1): ++ t = [arg.idl_type for arg in X.arguments] ++ o = [arg.optionality for arg in X.arguments] ++ for i in range(n - 1, -1, -1): + if X.arguments[i].optionality == IdlType.Optionality.REQUIRED: + break + S.append(OverloadGroup.EffectiveOverloadItem(X, t[:i], o[:i])) +@@ -222,7 +221,7 @@ class OverloadGroup(WithIdentifier): + for item in items) + assert len(items) > 
1 + +- for index in xrange(len(items[0].type_list)): ++ for index in range(len(items[0].type_list)): + # Assume that the given items are valid, and we only need to test + # the two types. + if OverloadGroup.are_distinguishable_types( +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/idl_compiler.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/idl_compiler.py +index c5ee2bd8a..ae45d5d97 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/idl_compiler.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/idl_compiler.py +@@ -149,8 +149,8 @@ class IdlCompiler(object): + for old_ir in old_irs: + new_ir = make_copy(old_ir) + self._ir_map.add(new_ir) +- new_ir.attributes = filter(not_disabled, new_ir.attributes) +- new_ir.operations = filter(not_disabled, new_ir.operations) ++ new_ir.attributes = list(filter(not_disabled, new_ir.attributes)) ++ new_ir.operations = list(filter(not_disabled, new_ir.operations)) + + def _record_defined_in_partial_and_mixin(self): + old_irs = self._ir_map.irs_of_kinds( +@@ -231,7 +231,7 @@ class IdlCompiler(object): + only_to_members_of_partial_or_mixin=False) + propagate_to_exposure(propagate) + +- map(process_member_like, ir.iter_all_members()) ++ list(map(process_member_like, ir.iter_all_members())) + + def process_member_like(ir): + propagate = functools.partial(propagate_extattr, ir=ir) +@@ -257,7 +257,7 @@ class IdlCompiler(object): + + self._ir_map.move_to_new_phase() + +- map(process_interface_like, old_irs) ++ list(map(process_interface_like, old_irs)) + + def _determine_blink_headers(self): + irs = self._ir_map.irs_of_kinds( +@@ -310,7 +310,7 @@ class IdlCompiler(object): + + self._ir_map.move_to_new_phase() + +- for identifier, old_dictionary in old_dictionaries.items(): ++ for identifier, old_dictionary in list(old_dictionaries.items()): + new_dictionary = make_copy(old_dictionary) + 
self._ir_map.add(new_dictionary) + for partial_dictionary in old_partial_dictionaries.get( +@@ -342,7 +342,7 @@ class IdlCompiler(object): + ir_sets_to_merge = [(interface, [ + mixins[include.mixin_identifier] + for include in includes.get(identifier, []) +- ]) for identifier, interface in interfaces.items()] ++ ]) for identifier, interface in list(interfaces.items())] + + self._ir_map.move_to_new_phase() + +@@ -393,7 +393,7 @@ class IdlCompiler(object): + + identifier_to_derived_set = {} + +- for old_interface in old_interfaces.values(): ++ for old_interface in list(old_interfaces.values()): + new_interface = make_copy(old_interface) + self._ir_map.add(new_interface) + inheritance_chain = create_inheritance_chain( +@@ -422,9 +422,7 @@ class IdlCompiler(object): + assert not new_interface.deriveds + derived_set = identifier_to_derived_set.get( + new_interface.identifier, set()) +- new_interface.deriveds = map( +- lambda id_: self._ref_to_idl_def_factory.create(id_), +- sorted(derived_set)) ++ new_interface.deriveds = [self._ref_to_idl_def_factory.create(id_) for id_ in sorted(derived_set)] + + def _supplement_missing_html_constructor_operation(self): + # Temporary mitigation of misuse of [HTMLConstructor] +@@ -553,7 +551,7 @@ class IdlCompiler(object): + self._ir_map.add(new_ir) + + for group in new_ir.iter_all_overload_groups(): +- exposures = map(lambda overload: overload.exposure, group) ++ exposures = [overload.exposure for overload in group] + + # [Exposed] + if any(not exposure.global_names_and_features +@@ -653,8 +651,8 @@ class IdlCompiler(object): + constructs = set() + for global_name in global_names: + constructs.update(exposed_map.get(global_name, [])) +- new_ir.exposed_constructs = map( +- self._ref_to_idl_def_factory.create, sorted(constructs)) ++ new_ir.exposed_constructs = list(map( ++ self._ref_to_idl_def_factory.create, sorted(constructs))) + + assert not new_ir.legacy_window_aliases + if new_ir.identifier != 'Window': +@@ -771,13 +769,13 @@ class 
IdlCompiler(object): + + grouped_typedefs = {} # {unique key: list of typedefs to the union} + all_typedefs = self._db.find_by_kind(DatabaseBody.Kind.TYPEDEF) +- for typedef in all_typedefs.values(): ++ for typedef in list(all_typedefs.values()): + if not typedef.idl_type.is_union: + continue + key = unique_key(typedef.idl_type) + grouped_typedefs.setdefault(key, []).append(typedef) + +- for key, union_types in grouped_unions.items(): ++ for key, union_types in list(grouped_unions.items()): + self._db.register( + DatabaseBody.Kind.UNION, + Union( +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/idl_type.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/idl_type.py +index 1d7ae8026..3a6dea2ea 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/idl_type.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/idl_type.py +@@ -265,14 +265,14 @@ class IdlType(WithExtendedAttributes, WithDebugInfo): + } + + value_counts = {None: 0, False: 0, True: 0} +- for value in switches.values(): ++ for value in list(switches.values()): + assert value is None or isinstance(value, bool) + value_counts[value] += 1 + assert value_counts[False] == 0 or value_counts[True] == 0, ( + "Specify only True or False arguments. Unspecified arguments are " + "automatically set to the opposite value.") + default = value_counts[True] == 0 +- for arg, value in switches.items(): ++ for arg, value in list(switches.items()): + if value is None: + switches[arg] = default + +@@ -1214,7 +1214,7 @@ class UnionType(IdlType): + return self._union_definition_object + + def set_union_definition_object(self, union_definition_object): +- # In Python2, we need to avoid circular imports. ++ # In Python3, we need to avoid circular imports. 
+ from .union import Union + assert isinstance(union_definition_object, Union) + assert self._union_definition_object is None +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/idl_type_test.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/idl_type_test.py +index b3d097a74..4efae30be 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/idl_type_test.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/idl_type_test.py +@@ -83,7 +83,7 @@ class IdlTypesTest(unittest.TestCase): + 'void': 'Void', + 'symbol': 'Symbol', + } +- for name, expect in type_names.items(): ++ for name, expect in list(type_names.items()): + self.assertEqual(expect, factory.simple_type(name).type_name) + + short_type = factory.simple_type('short') +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/interface.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/interface.py +index 65d24e529..063afb8a4 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/interface.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/interface.py +@@ -180,8 +180,7 @@ class Interface(UserDefinedType, WithExtendedAttributes, WithCodeGeneratorInfo, + self._constructor_groups = tuple([ + ConstructorGroup( + group_ir, +- filter(lambda x: x.identifier == group_ir.identifier, +- self._constructors), ++ [x for x in self._constructors if x.identifier == group_ir.identifier], + owner=self) for group_ir in ir.constructor_groups + ]) + assert len(self._constructor_groups) <= 1 +@@ -192,8 +191,7 @@ class Interface(UserDefinedType, WithExtendedAttributes, WithCodeGeneratorInfo, + self._named_constructor_groups = tuple([ + ConstructorGroup( + group_ir, +- filter(lambda x: x.identifier == group_ir.identifier, +- self._named_constructors), ++ [x for x in 
self._named_constructors if x.identifier == group_ir.identifier], + owner=self) for group_ir in ir.named_constructor_groups + ]) + self._operations = tuple([ +@@ -203,22 +201,18 @@ class Interface(UserDefinedType, WithExtendedAttributes, WithCodeGeneratorInfo, + self._operation_groups = tuple([ + OperationGroup( + group_ir, +- filter(lambda x: x.identifier == group_ir.identifier, +- self._operations), ++ [x for x in self._operations if x.identifier == group_ir.identifier], + owner=self) for group_ir in ir.operation_groups + ]) + self._exposed_constructs = tuple(ir.exposed_constructs) + self._legacy_window_aliases = tuple(ir.legacy_window_aliases) + self._indexed_and_named_properties = None +- indexed_and_named_property_operations = filter( +- lambda x: x.is_indexed_or_named_property_operation, +- self._operations) ++ indexed_and_named_property_operations = [x for x in self._operations if x.is_indexed_or_named_property_operation] + if indexed_and_named_property_operations: + self._indexed_and_named_properties = IndexedAndNamedProperties( + indexed_and_named_property_operations, owner=self) + self._stringifier = None +- stringifier_operation_irs = filter(lambda x: x.is_stringifier, +- ir.operations) ++ stringifier_operation_irs = [x for x in ir.operations if x.is_stringifier] + if stringifier_operation_irs: + assert len(stringifier_operation_irs) == 1 + op_ir = make_copy(stringifier_operation_irs[0]) +@@ -231,8 +225,7 @@ class Interface(UserDefinedType, WithExtendedAttributes, WithCodeGeneratorInfo, + attribute = None + if operation.stringifier_attribute: + attr_id = operation.stringifier_attribute +- attributes = filter(lambda x: x.identifier == attr_id, +- self._attributes) ++ attributes = [x for x in self._attributes if x.identifier == attr_id] + assert len(attributes) == 1 + attribute = attributes[0] + self._stringifier = Stringifier(operation, attribute, owner=self) +@@ -254,7 +247,7 @@ class Interface(UserDefinedType, WithExtendedAttributes, 
WithCodeGeneratorInfo, + @property + def deriveds(self): + """Returns the list of the derived interfaces.""" +- return tuple(map(lambda ref: ref.target_object, self._deriveds)) ++ return tuple([ref.target_object for ref in self._deriveds]) + + @property + def inclusive_inherited_interfaces(self): +@@ -346,7 +339,7 @@ class Interface(UserDefinedType, WithExtendedAttributes, WithCodeGeneratorInfo, + Returns a list of the constructs that are exposed on this global object. + """ + return tuple( +- map(lambda ref: ref.target_object, self._exposed_constructs)) ++ [ref.target_object for ref in self._exposed_constructs]) + + @property + def legacy_window_aliases(self): +@@ -578,8 +571,7 @@ class Iterable(WithDebugInfo): + self._operation_groups = tuple([ + OperationGroup( + group_ir, +- filter(lambda x: x.identifier == group_ir.identifier, +- self._operations), ++ [x for x in self._operations if x.identifier == group_ir.identifier], + owner=owner) for group_ir in ir.operation_groups + ]) + +@@ -666,8 +658,7 @@ class Maplike(WithDebugInfo): + self._operation_groups = tuple([ + OperationGroup( + group_ir, +- filter(lambda x: x.identifier == group_ir.identifier, +- self._operations), ++ [x for x in self._operations if x.identifier == group_ir.identifier], + owner=owner) for group_ir in ir.operation_groups + ]) + +@@ -755,8 +746,7 @@ class Setlike(WithDebugInfo): + self._operation_groups = tuple([ + OperationGroup( + group_ir, +- filter(lambda x: x.identifier == group_ir.identifier, +- self._operations), ++ [x for x in self._operations if x.identifier == group_ir.identifier], + owner=owner) for group_ir in ir.operation_groups + ]) + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/ir_builder.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/ir_builder.py +index e9aeff4ab..1d21148e2 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/ir_builder.py ++++ 
b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/ir_builder.py +@@ -2,6 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + ++import sys + from .argument import Argument + from .ast_group import AstGroup + from .attribute import Attribute +@@ -29,6 +30,9 @@ from .namespace import Namespace + from .operation import Operation + from .typedef import Typedef + ++# TODO: Remove this once Python2 is obsoleted. ++if sys.version_info.major != 2: ++ long = int + + def load_and_register_idl_definitions(filepaths, register_ir, + create_ref_to_idl_def, idl_type_factory): +@@ -120,7 +124,7 @@ class _IRBuilder(object): + for child in child_nodes + ] + if stringifier_members: +- members.extend(filter(None, stringifier_members)) ++ members.extend([_f for _f in stringifier_members if _f]) + attributes = [] + constants = [] + constructors = [] +@@ -160,7 +164,7 @@ class _IRBuilder(object): + child_nodes = list(node.GetChildren()) + extended_attributes = self._take_extended_attributes(child_nodes) + +- members = map(self._build_interface_member, child_nodes) ++ members = list(map(self._build_interface_member, child_nodes)) + attributes = [] + constants = [] + operations = [] +@@ -302,7 +306,7 @@ class _IRBuilder(object): + child_nodes = list(node.GetChildren()) + inherited = self._take_inheritance(child_nodes) + extended_attributes = self._take_extended_attributes(child_nodes) +- own_members = map(self._build_dictionary_member, child_nodes) ++ own_members = list(map(self._build_dictionary_member, child_nodes)) + + return Dictionary.IR( + identifier=Identifier(node.GetName()), +@@ -336,7 +340,7 @@ class _IRBuilder(object): + + child_nodes = list(node.GetChildren()) + extended_attributes = self._take_extended_attributes(child_nodes) +- members = map(self._build_interface_member, child_nodes) ++ members = list(map(self._build_interface_member, child_nodes)) + constants = [] + operations = [] + for 
member in members: +@@ -456,8 +460,8 @@ class _IRBuilder(object): + assert len(child_nodes) == 1 + child = child_nodes[0] + if child.GetClass() == 'Arguments': +- arguments = map(build_extattr_argument, +- child.GetChildren()) ++ arguments = list(map(build_extattr_argument, ++ child.GetChildren())) + elif child.GetClass() == 'Call': + assert len(child.GetChildren()) == 1 + grand_child = child.GetChildren()[0] +@@ -486,7 +490,7 @@ class _IRBuilder(object): + + assert node.GetClass() == 'ExtAttributes' + return ExtendedAttributes( +- filter(None, map(build_extended_attribute, node.GetChildren()))) ++ [_f for _f in map(build_extended_attribute, node.GetChildren()) if _f]) + + def _build_inheritance(self, node): + assert node.GetClass() == 'Inherit' +@@ -506,7 +510,7 @@ class _IRBuilder(object): + + def _build_iterable(self, node): + assert node.GetClass() == 'Iterable' +- types = map(self._build_type, node.GetChildren()) ++ types = list(map(self._build_type, node.GetChildren())) + assert len(types) == 1 or len(types) == 2 + if len(types) == 1: # value iterator + key_type, value_type = (None, types[0]) +@@ -548,7 +552,7 @@ class _IRBuilder(object): + elif type_token == 'integer': + idl_type = factory.simple_type(name='long', debug_info=debug_info) + assert isinstance(value_token, str) +- value = long(value_token, base=0) ++ value = int(value_token, base=0) + literal = value_token + elif type_token == 'float': + idl_type = factory.simple_type( +@@ -584,7 +588,7 @@ class _IRBuilder(object): + def _build_maplike(self, node, interface_identifier): + assert node.GetClass() == 'Maplike' + assert isinstance(interface_identifier, Identifier) +- types = map(self._build_type, node.GetChildren()) ++ types = list(map(self._build_type, node.GetChildren())) + assert len(types) == 2 + key_type, value_type = types + is_readonly = bool(node.GetProperty('READONLY')) +@@ -676,7 +680,7 @@ class _IRBuilder(object): + def _build_setlike(self, node, interface_identifier): + assert 
node.GetClass() == 'Setlike' + assert isinstance(interface_identifier, Identifier) +- types = map(self._build_type, node.GetChildren()) ++ types = list(map(self._build_type, node.GetChildren())) + assert len(types) == 1 + value_type = types[0] + is_readonly = bool(node.GetProperty('READONLY')) +@@ -838,7 +842,7 @@ class _IRBuilder(object): + + def build_union_type(node, extended_attributes): + return self._idl_type_factory.union_type( +- member_types=map(self._build_type, node.GetChildren()), ++ member_types=list(map(self._build_type, node.GetChildren())), + is_optional=is_optional, + extended_attributes=extended_attributes, + debug_info=self._build_debug_info(node)) +@@ -993,7 +997,7 @@ class _IRBuilder(object): + + return ExtendedAttributes([ + ExtendedAttribute(key=key, values=values) +- for key, values in key_values.items() ++ for key, values in list(key_values.items()) + ]) + + def _create_iterator_operations(self, node): +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/ir_map.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/ir_map.py +index 918065232..05222a6f3 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/ir_map.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/ir_map.py +@@ -168,7 +168,7 @@ class IRMap(object): + """ + assert isinstance(identifier, Identifier) + for irs_per_phase in self._single_value_irs[self._current_phase::-1]: +- for irs_per_kind in irs_per_phase.values(): ++ for irs_per_kind in list(irs_per_phase.values()): + if identifier in irs_per_kind: + return irs_per_kind[identifier] + raise KeyError(identifier) +@@ -190,7 +190,7 @@ class IRMap(object): + """Returns a flattened list of IRs of the given kind.""" + if IRMap.IR.Kind.does_support_multiple_defs(kind): + accumulated = [] +- for irs in self.find_by_kind(kind).values(): ++ for irs in list(self.find_by_kind(kind).values()): + 
accumulated.extend(irs) + return accumulated + else: +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/make_copy.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/make_copy.py +index a7a2b11f3..d2145ee27 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/make_copy.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/make_copy.py +@@ -2,6 +2,13 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + ++import sys ++ ++# TODO: Remove this once Python2 is obsoleted. ++if sys.version_info.major != 2: ++ long = int ++ basestring = str ++ + + def make_copy(obj, memo=None): + """ +@@ -16,7 +23,7 @@ def make_copy(obj, memo=None): + memo = dict() + + if (obj is None +- or isinstance(obj, (bool, int, long, float, complex, basestring))): ++ or isinstance(obj, (bool, int, float, complex, str))): + # Do not make a copy if the object is of an immutable primitive type + # (or its subclass). 
+ # +@@ -43,16 +50,16 @@ def make_copy(obj, memo=None): + cls = type(obj) + + if isinstance(obj, (list, tuple, set, frozenset)): +- return memoize(cls(map(lambda x: make_copy(x, memo), obj))) ++ return memoize(cls([make_copy(x, memo) for x in obj])) + + if isinstance(obj, dict): + return memoize( + cls([(make_copy(key, memo), make_copy(value, memo)) +- for key, value in obj.items()])) ++ for key, value in list(obj.items())])) + + if hasattr(obj, '__dict__'): + copy = memoize(cls.__new__(cls)) +- for name, value in obj.__dict__.items(): ++ for name, value in list(obj.__dict__.items()): + setattr(copy, name, make_copy(value, memo)) + return copy + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/namespace.py b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/namespace.py +index eeabef97c..682c7c33d 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/namespace.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/scripts/web_idl/namespace.py +@@ -109,8 +109,7 @@ class Namespace(UserDefinedType, WithExtendedAttributes, WithCodeGeneratorInfo, + self._operation_groups = tuple([ + OperationGroup( + operation_group_ir, +- filter(lambda x: x.identifier == operation_group_ir.identifier, +- self._operations), ++ [x for x in self._operations if x.identifier == operation_group_ir.identifier], + owner=self) for operation_group_ir in ir.operation_groups + ]) + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/bindings/templates/dictionary_v8.cc.tmpl b/src/3rdparty/chromium/third_party/blink/renderer/bindings/templates/dictionary_v8.cc.tmpl +index 0add9c45a..dc910f6d2 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/bindings/templates/dictionary_v8.cc.tmpl ++++ b/src/3rdparty/chromium/third_party/blink/renderer/bindings/templates/dictionary_v8.cc.tmpl +@@ -59,9 +59,9 @@ void {{v8_class}}::ToImpl(v8::Isolate* isolate, v8::Local v8_value, { + 
DCHECK(executionContext); + {% endif %}{# has_origin_trial_members #} + {% endif %}{# members #} +- {% for origin_trial_test, origin_trial_member_list in members | groupby('origin_trial_feature_name') %} ++ {% for origin_trial_test, origin_trial_member_list in members | stringifykeygroupby('origin_trial_feature_name') %} + {% filter origin_trial_enabled(origin_trial_test, "executionContext") %} +- {% for feature_name, member_list in origin_trial_member_list | groupby('runtime_enabled_feature_name') %} ++ {% for feature_name, member_list in origin_trial_member_list | stringifykeygroupby('runtime_enabled_feature_name') %} + {% filter runtime_enabled(feature_name) %} + {% for member in member_list %} + v8::Local {{member.v8_value}}; +@@ -147,9 +147,9 @@ bool toV8{{cpp_class}}(const {{cpp_class}}* impl, v8::Local dictiona + DCHECK(executionContext); + {% endif %}{# has_origin_trial_members #} + {% endif %}{# members #} +- {% for origin_trial_test, origin_trial_member_list in members | groupby('origin_trial_feature_name') %} ++ {% for origin_trial_test, origin_trial_member_list in members | stringifykeygroupby('origin_trial_feature_name') %} + {% filter origin_trial_enabled(origin_trial_test, "executionContext") %} +- {% for feature_name, member_list in origin_trial_member_list | groupby('runtime_enabled_feature_name') %} ++ {% for feature_name, member_list in origin_trial_member_list | stringifykeygroupby('runtime_enabled_feature_name') %} + {% filter runtime_enabled(feature_name) %} + {% for member in member_list %} + v8::Local {{member.v8_value}}; +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/aria_properties.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/aria_properties.py +index 742e7c27c..df8b7fd21 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/aria_properties.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/aria_properties.py +@@ -21,4 +21,4 @@ class 
ARIAReader(object): + self._data = json5.loads(json5_file.read()) + + def attributes_list(self): +- return {'data': [item[u'name'] for item in self._data['attributes']]} ++ return {'data': [item['name'] for item in self._data['attributes']]} +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/blinkbuild/PRESUBMIT.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/blinkbuild/PRESUBMIT.py +index 75dda00cb..7e4b77d04 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/blinkbuild/PRESUBMIT.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/blinkbuild/PRESUBMIT.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + + def _RunBindingsTests(input_api, output_api): +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/blinkbuild/name_style_converter_test.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/blinkbuild/name_style_converter_test.py +index 5102efa57..eec4921b2 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/blinkbuild/name_style_converter_test.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/blinkbuild/name_style_converter_test.py +@@ -7,8 +7,8 @@ + + import unittest + +-from name_style_converter import NameStyleConverter +-from name_style_converter import tokenize_name ++from .name_style_converter import NameStyleConverter ++from .name_style_converter import tokenize_name + + + class SmartTokenizerTest(unittest.TestCase): +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_css_property_names.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_css_property_names.py +index bc7c717cf..5dffed5e1 100755 +--- 
a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_css_property_names.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_css_property_names.py +@@ -52,7 +52,7 @@ class CSSPropertyNamesWriter(json5_generator.Writer): + 'property_id_bit_length': + self._css_properties.property_id_bit_length, + 'max_name_length': +- max(map(len, self._css_properties.properties_by_id)), ++ max(list(map(len, self._css_properties.properties_by_id))), + } + + @gperf.use_jinja_gperf_template( +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_css_value_id_mappings.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_css_value_id_mappings.py +index 996b82dc4..c0d15042b 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_css_value_id_mappings.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_css_value_id_mappings.py +@@ -58,7 +58,7 @@ def _find_largest_segment(segments): + longest_segment: the start and end indices of the longest segment + + """ +- segment_list = zip(segments[:-1], segments[1:]) ++ segment_list = list(zip(segments[:-1], segments[1:])) + return max(segment_list, key=lambda x: x[1] - x[0]) + + +@@ -85,11 +85,11 @@ def _find_enum_longest_continuous_segment(property_, + Build the switch case statements of other enums not in the + segment. Enums in the segment will be computed in default clause. 
+ """ +- property_enum_order = range(len(property_['keywords'])) ++ property_enum_order = list(range(len(property_['keywords']))) + css_enum_order = [ + name_to_position_dictionary[x] for x in property_['keywords'] + ] +- enum_pair_list = zip(css_enum_order, property_enum_order) ++ enum_pair_list = list(zip(css_enum_order, property_enum_order)) + enum_segment, enum_pair_list = _find_continuous_segment(enum_pair_list) + longest_segment = _find_largest_segment(enum_segment) + +@@ -128,8 +128,8 @@ class CSSValueIDMappingsWriter(make_style_builder.StyleBuilderWriter): + [self.css_values_dictionary_file], + default_parameters=self.default_parameters).name_dictionaries + name_to_position_dictionary = dict( +- zip([x['name'].original for x in css_values_dictionary], +- range(len(css_values_dictionary)))) ++ list(zip([x['name'].original for x in css_values_dictionary], ++ list(range(len(css_values_dictionary)))))) + + for property_ in self.css_properties.properties_including_aliases: + include_paths.update(property_['include_paths']) +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_cssom_types.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_cssom_types.py +index ef5c5f57b..239c45fce 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_cssom_types.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_cssom_types.py +@@ -31,8 +31,8 @@ class CSSOMTypesWriter(json5_generator.Writer): + property_['typedom_types'] = types + + # Generate CSSValueID values from keywords. 
+- property_['keywordIDs'] = map(enum_key_for_css_keyword, +- property_['keywords']) ++ property_['keywordIDs'] = list(map(enum_key_for_css_keyword, ++ property_['keywords'])) + + self._outputs = { + 'cssom_types.cc': self.generate_types, +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_style_shorthands.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_style_shorthands.py +index 1799cd5a1..2a700120d 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_style_shorthands.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/make_style_shorthands.py +@@ -71,7 +71,7 @@ class Expansion(object): + def enabled_longhands(self): + include = lambda longhand: not longhand[ + 'runtime_flag'] or self.is_enabled(longhand['runtime_flag']) +- return filter(include, self._longhands) ++ return list(filter(include, self._longhands)) + + @property + def index(self): +@@ -87,8 +87,7 @@ class Expansion(object): + + def create_expansions(longhands): + flags = collect_runtime_flags(longhands) +- expansions = map(lambda mask: Expansion(longhands, flags, mask), +- range(1 << len(flags))) ++ expansions = [Expansion(longhands, flags, mask) for mask in range(1 << len(flags))] + assert len(expansions) > 0 + # We generate 2^N expansions for N flags, so enforce some limit. 
+ assert len(flags) <= 4, 'Too many runtime flags for a single shorthand' +@@ -114,19 +113,17 @@ class StylePropertyShorthandWriter(json5_generator.Writer): + + self._longhand_dictionary = defaultdict(list) + for property_ in json5_properties.shorthands: +- property_['longhand_enum_keys'] = map(enum_key_for_css_property, +- property_['longhands']) +- property_['longhand_property_ids'] = map(id_for_css_property, +- property_['longhands']) +- +- longhands = map( +- lambda name: json5_properties.properties_by_name[name], +- property_['longhands']) ++ property_['longhand_enum_keys'] = list(map(enum_key_for_css_property, ++ property_['longhands'])) ++ property_['longhand_property_ids'] = list(map(id_for_css_property, ++ property_['longhands'])) ++ ++ longhands = [json5_properties.properties_by_name[name] for name in property_['longhands']] + property_['expansions'] = create_expansions(longhands) + for longhand_enum_key in property_['longhand_enum_keys']: + self._longhand_dictionary[longhand_enum_key].append(property_) + +- for longhands in self._longhand_dictionary.values(): ++ for longhands in list(self._longhand_dictionary.values()): + # Sort first by number of longhands in decreasing order, then + # alphabetically + longhands.sort( +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/properties/make_css_property_instances.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/properties/make_css_property_instances.py +index 75030ac57..f72aadee1 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/properties/make_css_property_instances.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/css/properties/make_css_property_instances.py +@@ -42,8 +42,8 @@ class CSSPropertyInstancesWriter(json5_generator.Writer): + aliases = self._css_properties.aliases + + # Lists of PropertyClassData. 
+- self._property_classes_by_id = map(self.get_class, properties) +- self._alias_classes_by_id = map(self.get_class, aliases) ++ self._property_classes_by_id = list(map(self.get_class, properties)) ++ self._alias_classes_by_id = list(map(self.get_class, aliases)) + + # Sort by enum value. + self._property_classes_by_id.sort(key=lambda t: t.enum_value) +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/style/make_computed_style_base.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/style/make_computed_style_base.py +index 731f354bc..879561879 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/style/make_computed_style_base.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/core/style/make_computed_style_base.py +@@ -135,7 +135,7 @@ def _create_groups(properties): + fields_in_current_group = group_dict.pop(None) + subgroups = [ + _dict_to_group(subgroup_name, subgroup_dict) +- for subgroup_name, subgroup_dict in group_dict.items() ++ for subgroup_name, subgroup_dict in list(group_dict.items()) + ] + return Group(name, subgroups, _reorder_fields(fields_in_current_group)) + +@@ -240,7 +240,7 @@ def _create_enums(properties): + enums[enum.type_name] = enum + + # Return the enums sorted by type name +- return list(sorted(enums.values(), key=lambda e: e.type_name)) ++ return list(sorted(list(enums.values()), key=lambda e: e.type_name)) + + + def _create_property_field(property_): +@@ -412,11 +412,11 @@ def _get_properties_ranking_using_partition_rule(properties_ranking, + popularity in the ranking. 
+ """ + return dict( +- zip(properties_ranking, [ ++ list(zip(properties_ranking, [ + bisect.bisect_left(partition_rule, + float(i) / len(properties_ranking)) + 1 + for i in range(len(properties_ranking)) +- ])) ++ ]))) + + + def _best_rank(prop, ranking_map): +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/gperf.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/gperf.py +index 5ee49056b..db72660d4 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/gperf.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/gperf.py +@@ -95,7 +95,7 @@ def main(): + + open(args.output_file, 'wb').write( + generate_gperf(gperf_path, +- open(infile).read(), gperf_args)) ++ open(infile).read(), gperf_args).encode('utf-8')) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/hasher.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/hasher.py +index 2b356fd49..03503b935 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/hasher.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/hasher.py +@@ -28,16 +28,16 @@ if sys.version_info.major != 2: + + class uint32_t(long): + def __rshift__(self, other): +- return uint32_t(long.__rshift__(self, other) & ((1 << 32) - 1)) ++ return uint32_t(int.__rshift__(self, other) & ((1 << 32) - 1)) + + def __lshift__(self, other): +- return uint32_t(long.__lshift__(self, other) & ((1 << 32) - 1)) ++ return uint32_t(int.__lshift__(self, other) & ((1 << 32) - 1)) + + def __add__(self, other): +- return uint32_t(long.__add__(self, other) & ((1 << 32) - 1)) ++ return uint32_t(int.__add__(self, other) & ((1 << 32) - 1)) + + def __xor__(self, other): +- return uint32_t(long.__xor__(self, other) & ((1 << 32) - 1)) ++ return uint32_t(int.__xor__(self, other) & ((1 << 32) - 1)) + + + def hash(string): +diff --git 
a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/in_file.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/in_file.py +index 28adc050f..826ea86a4 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/in_file.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/in_file.py +@@ -26,7 +26,7 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-from __future__ import print_function ++ + + import copy + import os +@@ -66,7 +66,7 @@ class InFile(object): + self._defaults = defaults + self._valid_values = copy.deepcopy( + valid_values if valid_values else {}) +- self._parse(map(str.strip, lines)) ++ self._parse(list(map(str.strip, lines))) + + @classmethod + def load_from_files(self, file_paths, defaults, valid_values, +@@ -143,7 +143,7 @@ class InFile(object): + if not name in self.parameters: + self._fatal( + "Unknown parameter: '%s' in line:\n%s\nKnown parameters: %s" % +- (name, line, self.parameters.keys())) ++ (name, line, list(self.parameters.keys()))) + self.parameters[name] = value + + def _parse_line(self, line): +@@ -163,7 +163,7 @@ class InFile(object): + if arg_name not in self._defaults: + self._fatal( + "Unknown argument: '%s' in line:\n%s\nKnown arguments: %s" +- % (arg_name, line, self._defaults.keys())) ++ % (arg_name, line, list(self._defaults.keys()))) + valid_values = self._valid_values.get(arg_name) + if valid_values and arg_value not in valid_values: + self._fatal( +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/in_file_unittest.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/in_file_unittest.py +index eb93ee502..1117a9def 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/in_file_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/in_file_unittest.py +@@ -56,7 +56,7 @@ 
name2 + 'arg2': [] + }, + ] +- self.assertEquals(in_file.name_dictionaries, expected_values) ++ self.assertEqual(in_file.name_dictionaries, expected_values) + + def test_with_parameters(self): + contents = """namespace=TestNamespace +@@ -82,7 +82,7 @@ name2 + 'namespace': 'TestNamespace', + 'fruit': True, + } +- self.assertEquals(in_file.parameters, expected_parameters) ++ self.assertEqual(in_file.parameters, expected_parameters) + + def test_assertion_for_non_in_files(self): + in_files = ['some_sample_file.json'] +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/in_generator.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/in_generator.py +index e46740a2e..9ffd950c1 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/in_generator.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/in_generator.py +@@ -26,16 +26,20 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-from __future__ import print_function ++ + + import os + import os.path + import shlex + import shutil ++import sys + import optparse +- + from in_file import InFile + ++# TODO: Remove this once Python2 is obsoleted. 
++if sys.version_info.major != 2: ++ basestring = str ++ + + ######################################################### + # This is now deprecated - use json5_generator.py instead +@@ -61,7 +65,7 @@ class GenericWriter(object): + output_file.write(contents) + + def write_files(self, output_dir): +- for file_name, generator in self._outputs.items(): ++ for file_name, generator in list(self._outputs.items()): + self._write_file_if_changed(output_dir, generator(), file_name) + + def set_gperf_path(self, gperf_path): +@@ -78,7 +82,7 @@ class Writer(GenericWriter): + def __init__(self, in_files): + super(Writer, self).__init__(in_files) + +- if isinstance(in_files, basestring): ++ if isinstance(in_files, str): + in_files = [in_files] + if in_files: + self.in_file = InFile.load_from_files(in_files, self.defaults, +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/json5_generator.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/json5_generator.py +index 2977f7be6..c4737323b 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/json5_generator.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/json5_generator.py +@@ -103,7 +103,7 @@ def _is_valid(valid_values, value, valid_keys=None): + assert valid_keys, "'valid_keys' must be declared when using a dict value" + return all([(key in valid_keys or key == "default") + and (val in valid_values or val == "") +- for key, val in value.items()]) ++ for key, val in list(value.items())]) + else: + return value in valid_values + +@@ -142,7 +142,7 @@ class Json5File(object): + + def _process(self, doc): + # Process optional metadata map entries. +- for key, value in doc.get("metadata", {}).items(): ++ for key, value in list(doc.get("metadata", {}).items()): + self._process_metadata(key, value) + # Get optional parameters map, and get the default value map from it. 
+ self.parameters.update(doc.get("parameters", {})) +@@ -155,7 +155,7 @@ class Json5File(object): + entry = self._get_entry(item) + self.name_dictionaries.append(entry) + else: +- for key, value in items.items(): ++ for key, value in list(items.items()): + value["name"] = key + entry = self._get_entry(value) + self.name_dictionaries.append(entry) +@@ -164,11 +164,11 @@ class Json5File(object): + def _process_metadata(self, key, value): + if key not in self.metadata: + raise Exception("Unknown metadata: '%s'\nKnown metadata: %s" % +- (key, self.metadata.keys())) ++ (key, list(self.metadata.keys()))) + self.metadata[key] = value + + def _get_defaults(self): +- for key, value in self.parameters.items(): ++ for key, value in list(self.parameters.items()): + if value and "default" in value: + self._defaults[key] = value["default"] + else: +@@ -188,10 +188,10 @@ class Json5File(object): + "The parameter 'name' is reserved, use a different name." + entry["name"] = NameStyleConverter(item.pop("name")) + # Validate parameters if it's specified. +- for key, value in item.items(): ++ for key, value in list(item.items()): + if key not in self.parameters: + raise Exception("Unknown parameter: '%s'\nKnown params: %s" % +- (key, self.parameters.keys())) ++ (key, list(self.parameters.keys()))) + assert self.parameters[key] is not None, \ + "Specification for parameter 'key' cannot be None. Use {} instead." 
+ self._validate_parameter(self.parameters[key], value) +@@ -300,7 +300,7 @@ class Writer(object): + output_file.write(contents) + + def write_files(self, output_dir): +- for file_name, generator in self._outputs.items(): ++ for file_name, generator in list(self._outputs.items()): + self._write_file_if_changed(output_dir, generator(), file_name) + + def cleanup_files(self, output_dir): +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/keyword_utils.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/keyword_utils.py +index 8a424d0aa..2cf216209 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/keyword_utils.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/keyword_utils.py +@@ -23,7 +23,7 @@ def sort_keyword_properties_by_canonical_order( + default_parameters=json5_file_parameters).name_dictionaries + css_values_dictionary = [x['name'].original for x in css_values_dictionary] + name_to_position_dictionary = dict( +- zip(css_values_dictionary, range(len(css_values_dictionary)))) ++ list(zip(css_values_dictionary, list(range(len(css_values_dictionary)))))) + for css_property in css_properties: + if css_property['field_template'] == 'keyword' and len( + css_property['include_paths']) == 0: +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_event_factory.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_event_factory.py +index a7ceb4518..6365aa775 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_event_factory.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_event_factory.py +@@ -27,7 +27,7 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +-from __future__ import print_function ++ + + import os.path + import sys +@@ -122,7 +122,7 @@ class EventFactoryWriter(json5_generator.Writer): + 'third_party/blink/renderer/platform/instrumentation/use_counter.h', + 'third_party/blink/renderer/platform/runtime_enabled_features.h', + } +- includes.update(map(self._headers_header_include_path, entries)) ++ includes.update(list(map(self._headers_header_include_path, entries))) + return sorted([x for x in includes if x]) + + @template_expander.use_jinja( +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_instrumenting_probes.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_instrumenting_probes.py +index 1b43f5786..da57af695 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_instrumenting_probes.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_instrumenting_probes.py +@@ -151,8 +151,8 @@ class Method(object): + # Splitting parameters by a comma, assuming that attribute + # lists contain no more than one attribute. 
+ self.params = list( +- map(Parameter, map(str.strip, +- match.group(3).split(",")))) ++ map(Parameter, list(map(str.strip, ++ match.group(3).split(","))))) + + + class Parameter(object): +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_origin_trials.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_origin_trials.py +index db5b93a99..f207eefa4 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_origin_trials.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_origin_trials.py +@@ -48,7 +48,7 @@ class OriginTrialsWriter(make_runtime_features.BaseRuntimeFeatureWriter): + self._implied_mappings = self._make_implied_mappings() + self._trial_to_features_map = self._make_trial_to_features_map() + self._max_features_per_trial = max( +- len(features) for features in self._trial_to_features_map.values()) ++ len(features) for features in list(self._trial_to_features_map.values())) + self._set_trial_types() + + @property +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_runtime_features.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_runtime_features.py +index cafe8d94a..ddf89d124 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_runtime_features.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_runtime_features.py +@@ -32,7 +32,7 @@ import os + import sys + + if sys.version_info.major == 2: +- import cPickle as pickle ++ import pickle as pickle + else: + import pickle + +@@ -138,7 +138,7 @@ class RuntimeFeatureWriter(BaseRuntimeFeatureWriter): + except Exception: + # If trouble unpickling, overwrite + pass +- with open(os.path.abspath(file_name), 'w') as pickle_file: ++ with open(os.path.abspath(file_name), 'wb') as pickle_file: + pickle.dump(features_map, pickle_file) + + def _template_inputs(self): +diff --git 
a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_runtime_features_utilities_unittest.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_runtime_features_utilities_unittest.py +index 0f46950d7..c7545056d 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_runtime_features_utilities_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/make_runtime_features_utilities_unittest.py +@@ -23,7 +23,7 @@ def _feature(name, + class MakeRuntimeFeaturesUtilitiesTest(unittest.TestCase): + def test_cycle(self): + # Cycle: 'c' => 'd' => 'e' => 'c' +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + AssertionError, 'Cycle found in depends_on/implied_by graph'): + util.origin_trials([ + _feature('a', depends_on=['b']), +@@ -34,15 +34,15 @@ class MakeRuntimeFeaturesUtilitiesTest(unittest.TestCase): + ]) + + def test_bad_dependency(self): +- with self.assertRaisesRegexp(AssertionError, ++ with self.assertRaisesRegex(AssertionError, + 'a: Depends on non-existent-feature: x'): + util.origin_trials([_feature('a', depends_on=['x'])]) + + def test_bad_implication(self): +- with self.assertRaisesRegexp(AssertionError, ++ with self.assertRaisesRegex(AssertionError, + 'a: Implied by non-existent-feature: x'): + util.origin_trials([_feature('a', implied_by=['x'])]) +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + AssertionError, + 'a: A feature must be in origin trial if implied by an origin trial feature: b' + ): +@@ -52,7 +52,7 @@ class MakeRuntimeFeaturesUtilitiesTest(unittest.TestCase): + ]) + + def test_both_dependency_and_implication(self): +- with self.assertRaisesRegexp( ++ with self.assertRaisesRegex( + AssertionError, + 'c: Only one of implied_by and depends_on is allowed'): + util.origin_trials([ +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/rule_bison.py 
b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/rule_bison.py +index 3d0ae0f8c..b3c2d612f 100755 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/rule_bison.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/rule_bison.py +@@ -47,8 +47,8 @@ from blinkbuild.name_style_converter import NameStyleConverter + + + def modify_file(path, prefix_lines, suffix_lines, replace_list=[]): +- prefix_lines = map(lambda s: s + '\n', prefix_lines) +- suffix_lines = map(lambda s: s + '\n', suffix_lines) ++ prefix_lines = [s + '\n' for s in prefix_lines] ++ suffix_lines = [s + '\n' for s in suffix_lines] + with open(path, 'r') as f: + old_lines = f.readlines() + for i in range(len(old_lines)): +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/element_factory.cc.tmpl b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/element_factory.cc.tmpl +index dc3f44c5b..3eefcf9f0 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/element_factory.cc.tmpl ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/element_factory.cc.tmpl +@@ -26,7 +26,7 @@ using {{namespace}}FunctionMap = HashMapGetExecutionContext())) { +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/element_type_helpers.h.tmpl b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/element_type_helpers.h.tmpl +index 1b5297d52..edecc81d9 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/element_type_helpers.h.tmpl ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/element_type_helpers.h.tmpl +@@ -15,7 +15,7 @@ namespace blink { + class Document; + + // Type checking. 
+-{% for tag in tags|sort if not tag.multipleTagNames and not tag.noTypeHelpers %} ++{% for tag in tags|sort(attribute='name') if not tag.multipleTagNames and not tag.noTypeHelpers %} + class {{tag.interface}}; + template <> + inline bool IsElementOfType(const Node& node) { +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/macros.tmpl b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/macros.tmpl +index 0244433af..dcdbb02a5 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/macros.tmpl ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/macros.tmpl +@@ -25,7 +25,7 @@ + + + {% macro trie_leaf(index, object, return_macro, lowercase_data) %} +-{% set name, value = object.items()[0] %} ++{% set name, value = (object.items()|list)[0] %} + {% if name|length %} + if ( + {%- for c in name -%} +@@ -45,7 +45,7 @@ return {{ return_macro(value) }}; + + + {% macro trie_switch(trie, index, return_macro, lowercase_data) %} +-{% if trie|length == 1 and trie.values()[0] is string %} ++{% if trie|length == 1 and (trie.values()|list)[0] is string %} + {{ trie_leaf(index, trie, return_macro, lowercase_data) -}} + {% else %} + {% if lowercase_data %} +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/make_qualified_names.h.tmpl b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/make_qualified_names.h.tmpl +index cb05c6c43..bd5566b03 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/make_qualified_names.h.tmpl ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/templates/make_qualified_names.h.tmpl +@@ -24,12 +24,12 @@ namespace {{cpp_namespace}} { + {{symbol_export}}extern const WTF::AtomicString& {{namespace_prefix}}NamespaceURI; + + // Tags +-{% for tag in tags|sort %} ++{% for tag in tags|sort(attribute='name') %} + {{symbol_export}}extern const 
blink::{{namespace}}QualifiedName& {{tag|symbol}}Tag; + {% endfor %} + + // Attributes +-{% for attr in attrs|sort %} ++{% for attr in attrs|sort(attribute='name') %} + {{symbol_export}}extern const blink::QualifiedName& {{attr|symbol}}Attr; + {% endfor %} + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/trie_builder.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/trie_builder.py +index fe4eb9cb7..7022a047e 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/trie_builder.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/trie_builder.py +@@ -30,7 +30,7 @@ def _single_trie(string_to_value_pairs, index): + dicts_by_indexed_letter[string[index]].append((string, value)) + + output = {} +- for char, d in dicts_by_indexed_letter.items(): ++ for char, d in list(dicts_by_indexed_letter.items()): + if len(d) == 1: + string = d[0][0] + value = d[0][1] +@@ -47,11 +47,11 @@ def trie_list_by_str_length(str_to_return_value_dict): + All strings should be all lower case. 
+ """ + dicts_by_length = defaultdict(list) +- for string, value in str_to_return_value_dict.items(): ++ for string, value in list(str_to_return_value_dict.items()): + dicts_by_length[len(string)].append((string, value)) + + output = [] +- for length, pairs in dicts_by_length.items(): ++ for length, pairs in list(dicts_by_length.items()): + output.append((length, _single_trie(sorted(pairs), 0))) + + return output +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/update_css_ranking.py b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/update_css_ranking.py +index 22d5f72e0..4928de4c5 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/update_css_ranking.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/build/scripts/update_css_ranking.py +@@ -7,7 +7,7 @@ + # Run `python update_css_ranking.py ` + # to update the ranking from API to + +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + import json + import sys + import cluster +@@ -36,7 +36,7 @@ def update_css_ranking(css_ranking_file, css_ranking_api): + css_ranking_api: url to CSS ranking api + + """ +- css_ranking = json.loads(urllib2.urlopen(css_ranking_api).read()) ++ css_ranking = json.loads(urllib.request.urlopen(css_ranking_api).read()) + css_ranking_content = {"properties": {}, "data": []} + css_ranking_content["data"] = [ + property_["property_name"] for property_ in sorted( +@@ -90,16 +90,16 @@ def produce_partition_rule(config_file, css_ranking_api): + + """ + css_ranking = sorted( +- json.loads(urllib2.urlopen(css_ranking_api).read()), ++ json.loads(urllib.request.urlopen(css_ranking_api).read()), + key=lambda x: -x["day_percentage"]) + total_css_properties = len(css_ranking) + css_ranking_dictionary = dict( + [(x["property_name"], x["day_percentage"] * 100) for x in css_ranking]) + css_ranking_cdf = dict( +- zip([x["property_name"] for x in css_ranking], [ ++ list(zip([x["property_name"] for x in css_ranking], [ 
+ float(i) / total_css_properties + for i in range(total_css_properties) +- ])) ++ ]))) + css_properties = json5_generator.Json5File.load_from_files( + [CSS_PROPERTIES]).name_dictionaries + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/PRESUBMIT.py b/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/PRESUBMIT.py +index 9c3f101ac..87b95c5ca 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/PRESUBMIT.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/PRESUBMIT.py +@@ -11,7 +11,7 @@ def _RunTests(input_api, output_api): + test_cmd = input_api.Command( + name=cmd_name, cmd=cmd, kwargs={}, message=output_api.PresubmitError) + if input_api.verbose: +- print 'Running ' + cmd_name ++ print('Running ' + cmd_name) + return input_api.RunTests([test_cmd]) + + +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/constraints.py b/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/constraints.py +index 01e2f10c3..f3ae84f3e 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/constraints.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/constraints.py +@@ -64,7 +64,7 @@ CHARACTERISTICS_RETRIEVED_BASE = \ + + + def _ToJsStr(s): +- return u'\'{}\''.format(s) ++ return '\'{}\''.format(s) + + + def _get_random_number(): +@@ -84,7 +84,7 @@ def _GetFuzzedJsString(s): + try: + fuzzed_string = fuzzed_string.decode('utf8') + except UnicodeDecodeError: +- print 'Can\'t decode fuzzed string. Trying again.' ++ print('Can\'t decode fuzzed string. Trying again.') + else: + # Escape 'escape' characters. 
+ fuzzed_string = fuzzed_string.replace('\\', r'\\') +@@ -101,7 +101,7 @@ def _get_array_of_random_ints(max_length, max_value): + exp_max_value = math.log(max_value, 2) + return '[{}]'.format(', '.join( + str(utils.UniformExpoInteger(0, exp_max_value)) +- for _ in xrange(length))) ++ for _ in range(length))) + + + def _get_typed_array(): +@@ -359,8 +359,8 @@ def get_characteristics_retrieved_base(): + + def get_get_primary_services_call(): + call = random.choice([ +- u'getPrimaryService({service_uuid})', +- u'getPrimaryServices({optional_service_uuid})' ++ 'getPrimaryService({service_uuid})', ++ 'getPrimaryServices({optional_service_uuid})' + ]) + + return call.format( +@@ -370,8 +370,8 @@ def get_get_primary_services_call(): + + def get_characteristics_call(): + call = random.choice([ +- u'getCharacteristic({characteristic_uuid})', +- u'getCharacteristics({optional_characteristic_uuid})' ++ 'getCharacteristic({characteristic_uuid})', ++ 'getCharacteristics({optional_characteristic_uuid})' + ]) + + return call.format( +@@ -389,7 +389,7 @@ def get_pick_a_service(): + ' service = Array.isArray(services)'\ + ' ? services[{} % services.length]'\ + ' : services' +- return string.format(random.randint(0, sys.maxint)) ++ return string.format(random.randint(0, sys.maxsize)) + + + def get_pick_a_characteristic(): +@@ -401,7 +401,7 @@ def get_pick_a_characteristic(): + ' characteristic = Array.isArray(characteristics)'\ + ' ? 
characteristics[{} % characteristics.length]'\ + ' : characteristics' +- return string.format(random.randint(0, sys.maxint)) ++ return string.format(random.randint(0, sys.maxsize)) + + + def get_reload_id(): +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/fuzz_integration_test.py b/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/fuzz_integration_test.py +index 9b37e5a1a..daada4c55 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/fuzz_integration_test.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/fuzz_integration_test.py +@@ -33,7 +33,7 @@ class WebBluetoothFuzzerTest(unittest.TestCase): + + written_files = glob.glob(os.path.join(self._output_dir, '*.html')) + +- self.assertEquals(100, len(written_files), 'Should have written 100 ' ++ self.assertEqual(100, len(written_files), 'Should have written 100 ' + 'test files.') + + for test_case in written_files: +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/fuzz_main_run.py b/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/fuzz_main_run.py +index d9b834f27..bc7d76a75 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/fuzz_main_run.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/fuzz_main_run.py +@@ -20,12 +20,12 @@ from fuzzer_helpers import FillInParameter + import parameter_fuzzer + import test_case_fuzzer + +-JS_FILES_AND_PARAMETERS = ((u'testharness.js', u'INCLUDE_TESTHARNESS'), +- (u'testharnessreport.js', +- u'INCLUDE_REPORT'), (u'bluetooth-test.js', +- u'INCLUDE_BLUETOOTH_TEST'), +- (u'bluetooth-fake-devices.js', +- u'INCLUDE_BLUETOOTH_FAKE_DEVICES')) ++JS_FILES_AND_PARAMETERS = (('testharness.js', 'INCLUDE_TESTHARNESS'), ++ 
('testharnessreport.js', ++ 'INCLUDE_REPORT'), ('bluetooth-test.js', ++ 'INCLUDE_BLUETOOTH_TEST'), ++ ('bluetooth-fake-devices.js', ++ 'INCLUDE_BLUETOOTH_FAKE_DEVICES')) + + SCRIPT_PREFIX = '\n' +@@ -83,7 +83,7 @@ def FuzzTemplate(template_path, resources_path): + Returns: + A string containing the test case. + """ +- print 'Generating test file based on {}'.format(template_path) ++ print('Generating test file based on {}'.format(template_path)) + + # Read the template. + template_file_handle = open(template_path) +@@ -128,8 +128,8 @@ def WriteTestFile(test_file_data, test_file_prefix, output_dir): + prefix=test_file_prefix, suffix='.html', dir=output_dir) + + with os.fdopen(file_descriptor, 'wb') as output: +- print 'Writing {} bytes to \'{}\''.format( +- len(test_file_data), file_path) ++ print('Writing {} bytes to \'{}\''.format( ++ len(test_file_data), file_path)) + output.write(test_file_data) + + return file_path +@@ -138,10 +138,10 @@ def WriteTestFile(test_file_data, test_file_prefix, output_dir): + def main(): + args = _GetArguments() + +- print 'Generating {} test file(s).'.format(args.no_of_files) +- print 'Writing test files to: \'{}\''.format(args.output_dir) ++ print('Generating {} test file(s).'.format(args.no_of_files)) ++ print('Writing test files to: \'{}\''.format(args.output_dir)) + if args.input_dir: +- print 'Reading data bundle from: \'{}\''.format(args.input_dir) ++ print('Reading data bundle from: \'{}\''.format(args.input_dir)) + + # Get Templates + current_path = os.path.dirname(os.path.realpath(__file__)) +@@ -165,8 +165,8 @@ def main(): + args.output_dir) + + if args.content_shell_dir: +- print '{} --run-web-tests {}'.format(args.content_shell_dir, +- test_file_path) ++ print('{} --run-web-tests {}'.format(args.content_shell_dir, ++ test_file_path)) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/test_case_fuzzer.py 
b/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/test_case_fuzzer.py +index 8e6bb1471..e2669fb9a 100644 +--- a/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/test_case_fuzzer.py ++++ b/src/3rdparty/chromium/third_party/blink/renderer/modules/bluetooth/testing/clusterfuzz/test_case_fuzzer.py +@@ -164,7 +164,7 @@ def _GenerateSequenceOfRandomTokens(): + """ + result = random.choice(BASE_TOKENS) + +- for _ in xrange(random.randint(1, MAX_NUM_OF_TOKENS)): ++ for _ in range(random.randint(1, MAX_NUM_OF_TOKENS)): + # Get random token. + token = random.choice(TOKENS) + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/bindings_tests.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/bindings_tests.py +index ca0bbd220..b99b3fdb6 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/bindings_tests.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/bindings_tests.py +@@ -136,7 +136,7 @@ def generate_interface_dependencies(runtime_enabled_features): + # So the files will be generated under + # output_dir/core/bindings/tests/idls/core. + # To avoid this issue, we need to clear relative_dir here. 
+- for value in info['interfaces_info'].itervalues(): ++ for value in info['interfaces_info'].values(): + value['relative_dir'] = '' + component_info = info_collector.get_component_info_as_dict( + runtime_enabled_features) +@@ -169,14 +169,14 @@ def generate_interface_dependencies(runtime_enabled_features): + non_test_idl_paths) + test_interfaces_info = {} + test_component_info = {} +- for component, paths in test_idl_paths.iteritems(): ++ for component, paths in test_idl_paths.items(): + test_interfaces_info[component], test_component_info[component] = \ + collect_interfaces_info(paths) + # In order to allow test IDL files to override the production IDL files if + # they have the same interface name, process the test IDL files after the + # non-test IDL files. + info_individuals = [non_test_interfaces_info] + \ +- test_interfaces_info.values() ++ list(test_interfaces_info.values()) + compute_interfaces_info_overall(info_individuals) + # Add typedefs which are specified in the actual IDL files to the testing + # component info. +@@ -239,22 +239,22 @@ def bindings_tests(output_directory, verbose, suppress_diff): + reference_basename = os.path.basename(reference_filename) + + if not os.path.isfile(reference_filename): +- print 'Missing reference file!' 
+- print '(if adding new test, update reference files)' +- print reference_basename +- print ++ print('Missing reference file!') ++ print('(if adding new test, update reference files)') ++ print(reference_basename) ++ print() + return False + + if not filecmp.cmp(reference_filename, output_filename): + # cmp is much faster than diff, and usual case is "no difference", + # so only run diff if cmp detects a difference +- print 'FAIL: %s' % reference_basename ++ print('FAIL: %s' % reference_basename) + if not suppress_diff: +- print diff(reference_filename, output_filename) ++ print(diff(reference_filename, output_filename)) + return False + + if verbose: +- print 'PASS: %s' % reference_basename ++ print('PASS: %s' % reference_basename) + return True + + def identical_output_files(output_files): +@@ -379,11 +379,11 @@ def bindings_tests(output_directory, verbose, suppress_diff): + + if passed: + if verbose: +- print +- print PASS_MESSAGE ++ print() ++ print(PASS_MESSAGE) + return 0 +- print +- print FAIL_MESSAGE ++ print() ++ print(FAIL_MESSAGE) + return 1 + + +@@ -391,7 +391,7 @@ def run_bindings_tests(reset_results, verbose, suppress_diff): + # Generate output into the reference directory if resetting results, or + # a temp directory if not. + if reset_results: +- print 'Resetting results' ++ print('Resetting results') + return bindings_tests(REFERENCE_DIRECTORY, verbose, suppress_diff) + with TemporaryDirectory() as temp_dir: + # TODO(peria): Remove this hack. 
+diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/collect_idls_into_json.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/collect_idls_into_json.py +index 243980c9b..bdfc5839a 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/collect_idls_into_json.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/collect_idls_into_json.py +@@ -411,7 +411,7 @@ def merge_partial_dicts(interfaces_dict, partials_dict): + Returns: + A merged dictionary of |interface_dict| with |partial_dict|. + """ +- for interface_name, partial in partials_dict.iteritems(): ++ for interface_name, partial in partials_dict.items(): + interface = interfaces_dict.get(interface_name) + if not interface: + raise Exception( +@@ -435,8 +435,8 @@ def merge_implement_nodes(interfaces_dict, implement_node_list): + for implement in implement_node_list: + reference = implement.GetProperty(_PROP_REFERENCE) + implement = implement.GetName() +- if (reference not in interfaces_dict.keys() +- or implement not in interfaces_dict.keys()): ++ if (reference not in list(interfaces_dict.keys()) ++ or implement not in list(interfaces_dict.keys())): + raise Exception( + 'There is not corresponding implement or reference interface.') + for member in _MEMBERS: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/collect_idls_into_json_test.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/collect_idls_into_json_test.py +index a77ff7588..eb5c3a000 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/collect_idls_into_json_test.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/collect_idls_into_json_test.py +@@ -77,8 +77,8 @@ class TestFunctions(unittest.TestCase): + collect_idls_into_json.get_const_value(const), '1') + self.assertTrue( + const_member.issuperset( +- collect_idls_into_json.const_node_to_dict(const). 
+- keys())) ++ list(collect_idls_into_json.const_node_to_dict(const). ++ keys()))) + else: + self.assertEqual(const, None) + +@@ -95,8 +95,8 @@ class TestFunctions(unittest.TestCase): + 'Node') + self.assertTrue( + attribute_member.issuperset( +- collect_idls_into_json.attribute_node_to_dict( +- attribute).keys())) ++ list(collect_idls_into_json.attribute_node_to_dict( ++ attribute).keys()))) + else: + self.assertEqual(attribute, None) + +@@ -114,8 +114,8 @@ class TestFunctions(unittest.TestCase): + 'Node') + self.assertTrue( + operate_member.issuperset( +- collect_idls_into_json.operation_node_to_dict( +- operation).keys())) ++ list(collect_idls_into_json.operation_node_to_dict( ++ operation).keys()))) + for argument in collect_idls_into_json.get_argument_node_list( + operation): + if argument: +@@ -126,8 +126,8 @@ class TestFunctions(unittest.TestCase): + 'Node') + self.assertTrue( + argument_member.issuperset( +- collect_idls_into_json.argument_node_to_dict( +- argument).keys())) ++ list(collect_idls_into_json.argument_node_to_dict( ++ argument).keys()))) + else: + self.assertEqual(argument, None) + else: +@@ -140,19 +140,19 @@ class TestFunctions(unittest.TestCase): + self.assertEqual(extattr.GetClass(), 'ExtAttribute') + self.assertEqual(extattr.GetName(), 'CustomToV8') + self.assertEqual( +- collect_idls_into_json.extattr_node_to_dict(extattr). +- keys(), ['Name']) ++ list(collect_idls_into_json.extattr_node_to_dict(extattr). ++ keys()), ['Name']) + self.assertEqual( +- collect_idls_into_json.extattr_node_to_dict(extattr). +- values(), ['CustomToV8']) ++ list(collect_idls_into_json.extattr_node_to_dict(extattr). 
++ values()), ['CustomToV8']) + else: + self.assertEqual(extattr, None) + + def test_inherit_node_to_dict(self): + inherit = collect_idls_into_json.inherit_node_to_dict(self.definition) + if inherit: +- self.assertEqual(inherit.keys(), ['Parent']) +- self.assertEqual(inherit.values(), ['EventTarget']) ++ self.assertEqual(list(inherit.keys()), ['Parent']) ++ self.assertEqual(list(inherit.values()), ['EventTarget']) + else: + self.assertEqual(inherit, []) + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/generate_idl_diff.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/generate_idl_diff.py +index 1e66d6206..34abbc626 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/generate_idl_diff.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/generate_idl_diff.py +@@ -137,7 +137,7 @@ def interfaces_diff(old_interfaces, new_interfaces): + |new_interfaces| + """ + annotated = {} +- for interface_name, interface in new_interfaces.items(): ++ for interface_name, interface in list(new_interfaces.items()): + if interface_name in old_interfaces: + annotated_interface, is_changed = members_diff( + old_interfaces[interface_name], interface) +@@ -148,7 +148,7 @@ def interfaces_diff(old_interfaces, new_interfaces): + interface = annotate_all_members(interface, DIFF_TAG_ADDED) + interface[DIFF_TAG] = DIFF_TAG_ADDED + annotated[interface_name] = interface +- for interface_name, interface in old_interfaces.items(): ++ for interface_name, interface in list(old_interfaces.items()): + interface = annotate_all_members(interface, DIFF_TAG_DELETED) + interface[DIFF_TAG] = DIFF_TAG_DELETED + annotated.update(old_interfaces) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/print_idl_diff.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/print_idl_diff.py +index fd3cb868b..a697b6a66 100755 +--- 
a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/print_idl_diff.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/bindings/print_idl_diff.py +@@ -183,14 +183,14 @@ def sort_interface_names_by_tags(interfaces): + Returns: + A list of sorted interface names + """ +- interface_list = interfaces.values() ++ interface_list = list(interfaces.values()) + removed, added, unspecified = group_by_tag(interface_list) + # pylint: disable=W0110 +- removed = map(lambda interface: interface['Name'], removed) ++ removed = [interface['Name'] for interface in removed] + # pylint: disable=W0110 +- added = map(lambda interface: interface['Name'], added) ++ added = [interface['Name'] for interface in added] + # pylint: disable=W0110 +- unspecified = map(lambda interface: interface['Name'], unspecified) ++ unspecified = [interface['Name'] for interface in unspecified] + sorted_interface_names = removed + added + unspecified + return sorted_interface_names + +@@ -382,14 +382,14 @@ def print_diff(diff, out): + Args: + A sorted diff + """ +- for interface_name, interface in diff.iteritems(): ++ for interface_name, interface in diff.items(): + print_member_with_color(interface, out) + out.change_color(Colorize.YELLOW) + out.write('[[') + out.write(interface_name) + out.writeln(']]') + out.reset_color() +- for member_name, member in interface.iteritems(): ++ for member_name, member in interface.items(): + if member_name == 'ExtAttributes': + out.writeln('ExtAttributes') + print_extattributes(member, out) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/checkout/baseline_optimizer.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/checkout/baseline_optimizer.py +index d6960a96f..6abffafb4 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/checkout/baseline_optimizer.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/checkout/baseline_optimizer.py +@@ -106,7 +106,7 @@ class 
BaselineOptimizer(object): + """ + results_by_directory = {} + directories = set() +- for port in self._ports.values(): ++ for port in list(self._ports.values()): + directories.update(set(self._relative_baseline_search_path(port))) + + for directory in directories: +@@ -160,14 +160,14 @@ class BaselineOptimizer(object): + def _move_baselines(self, baseline_name, results_by_directory, + new_results_by_directory): + data_for_result = {} +- for directory, result in results_by_directory.items(): ++ for directory, result in list(results_by_directory.items()): + if result not in data_for_result: + source = self._join_directory(directory, baseline_name) + data_for_result[result] = self._filesystem.read_binary_file( + source) + + fs_files = [] +- for directory, result in results_by_directory.items(): ++ for directory, result in list(results_by_directory.items()): + if new_results_by_directory.get(directory) != result: + file_name = self._join_directory(directory, baseline_name) + if self._filesystem.exists(file_name): +@@ -184,7 +184,7 @@ class BaselineOptimizer(object): + _log.debug(' (Nothing to delete)') + + file_names = [] +- for directory, result in new_results_by_directory.items(): ++ for directory, result in list(new_results_by_directory.items()): + if results_by_directory.get(directory) != result: + destination = self._join_directory(directory, baseline_name) + self._filesystem.maybe_make_directory( +@@ -221,7 +221,7 @@ class BaselineOptimizer(object): + def _port_from_baseline_dir(self, baseline_dir): + """Returns a Port object from the given baseline directory.""" + baseline_dir = self._filesystem.basename(baseline_dir) +- for port in self._ports.values(): ++ for port in list(self._ports.values()): + if self._filesystem.basename( + port.baseline_version_dir()) == baseline_dir: + return port +@@ -305,7 +305,7 @@ class BaselineOptimizer(object): + test_name, self._virtual_base(baseline_name)) + results_by_port_name = self._results_by_port_name(results_by_directory) 
+ +- for port_name in self._ports.keys(): ++ for port_name in list(self._ports.keys()): + assert port_name in results_by_port_name + if results_by_port_name[port_name] != virtual_root_digest: + return +@@ -383,7 +383,7 @@ class BaselineOptimizer(object): + A dictionary mapping port names to their baselines. + """ + results_by_port_name = {} +- for port_name, port in self._ports.items(): ++ for port_name, port in list(self._ports.items()): + for directory in self._relative_baseline_search_path(port): + if directory in results_by_directory: + results_by_port_name[port_name] = results_by_directory[ +@@ -399,7 +399,7 @@ class BaselineOptimizer(object): + """Returns a list of directories immediately preceding the root on + search paths.""" + directories = set() +- for port in self._ports.values(): ++ for port in list(self._ports.values()): + directory = self._filesystem.relpath( + self._baseline_search_path(port)[-1], self._parent_of_tests) + directories.add(directory) +@@ -462,7 +462,7 @@ class BaselineOptimizer(object): + # baseline is found (or the root is reached), i.e., keep the most + # generic one among duplicate baselines. + new_results_by_directory = copy.copy(results_by_directory) +- for port_name, port in self._ports.items(): ++ for port_name, port in list(self._ports.items()): + current_result = results_by_port_name.get(port_name) + + # This happens if we're missing baselines for a port. 
+diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/checkout/baseline_optimizer_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/checkout/baseline_optimizer_unittest.py +index 725ff1afa..2530d523c 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/checkout/baseline_optimizer_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/checkout/baseline_optimizer_unittest.py +@@ -109,7 +109,7 @@ class BaselineOptimizerTest(unittest.TestCase): + self.fs.join(web_tests_dir, 'VirtualTestSuites'), + '[{"prefix": "gpu", "bases": ["fast/canvas"], "args": ["--foo"]}]') + +- for dirname, contents in results_by_directory.items(): ++ for dirname, contents in list(results_by_directory.items()): + self.fs.write_binary_file( + self.fs.join(web_tests_dir, dirname, baseline_name), contents) + +@@ -120,7 +120,7 @@ class BaselineOptimizerTest(unittest.TestCase): + baseline_optimizer.optimize( + self.fs.join(baseline_dirname, test_name), suffix)) + +- for dirname, contents in directory_to_new_results.items(): ++ for dirname, contents in list(directory_to_new_results.items()): + path = self.fs.join(web_tests_dir, dirname, baseline_name) + if contents is None: + # Check files that are explicitly marked as absent. +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/checkout/git_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/checkout/git_unittest.py +index da3be9a29..cef022021 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/checkout/git_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/checkout/git_unittest.py +@@ -168,7 +168,7 @@ class GitTestWithRealFilesystemAndExecutive(unittest.TestCase): + # Even if diff.noprefix is enabled, create_patch() produces diffs with prefixes. 
+ self._run(['git', 'config', 'diff.noprefix', 'true']) + patch = git.create_patch() +- self.assertRegexpMatches( ++ self.assertRegex( + patch, r'^diff --git a/test_file_commit1 b/test_file_commit1') + + def test_rename_files(self): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/message_pool.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/message_pool.py +index 859fa2161..c22ea05d5 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/message_pool.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/message_pool.py +@@ -39,10 +39,10 @@ If you don't need these features, use multiprocessing.Pool or concurrency.future + instead. + """ + +-import cPickle ++import pickle + import logging + import multiprocessing +-import Queue ++import queue + import sys + import traceback + +@@ -68,8 +68,8 @@ class _MessagePool(object): + self._name = 'manager' + self._running_inline = (self._num_workers == 1) + if self._running_inline: +- self._messages_to_worker = Queue.Queue() +- self._messages_to_manager = Queue.Queue() ++ self._messages_to_worker = queue.Queue() ++ self._messages_to_manager = queue.Queue() + else: + self._messages_to_worker = multiprocessing.Queue() + self._messages_to_manager = multiprocessing.Queue() +@@ -92,7 +92,7 @@ class _MessagePool(object): + from_user=True, + logs=())) + +- for _ in xrange(self._num_workers): ++ for _ in range(self._num_workers): + self._messages_to_worker.put( + _Message( + self._name, +@@ -110,7 +110,7 @@ class _MessagePool(object): + if self._running_inline or self._can_pickle(self._host): + host = self._host + +- for worker_number in xrange(self._num_workers): ++ for worker_number in range(self._num_workers): + worker = _Worker(host, self._messages_to_manager, + self._messages_to_worker, self._worker_factory, + worker_number, self._running_inline, +@@ -171,7 +171,7 @@ class _MessagePool(object): + + def _can_pickle(self, host): + try: +- 
cPickle.dumps(host) ++ pickle.dumps(host) + return True + except TypeError: + return False +@@ -190,7 +190,7 @@ class _MessagePool(object): + method = getattr(self, '_handle_' + message.name) + assert method, 'bad message %s' % repr(message) + method(message.src, *message.args) +- except Queue.Empty: ++ except queue.Empty: + pass + + +@@ -273,7 +273,7 @@ class _Worker(multiprocessing.Process): + break + + _log.debug('%s exiting', self.name) +- except Queue.Empty: ++ except queue.Empty: + assert False, '%s: ran out of messages in worker queue.' % self.name + except KeyboardInterrupt: + self._raise(sys.exc_info()) +@@ -307,7 +307,7 @@ class _Worker(multiprocessing.Process): + def _raise(self, exc_info): + exception_type, exception_value, exception_traceback = exc_info + if self._running_inline: +- raise exception_type, exception_value, exception_traceback ++ raise exception_type(exception_value).with_traceback(exception_traceback) + + if exception_type == KeyboardInterrupt: + _log.debug('%s: interrupted, exiting', self.name) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/file_uploader.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/file_uploader.py +index 3cc222c3c..74a39bb13 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/file_uploader.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/file_uploader.py +@@ -27,7 +27,7 @@ + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + import mimetypes +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + + from blinkpy.common.net.network_transaction import NetworkTransaction + +@@ -58,7 +58,7 @@ def _encode_multipart_form_data(fields, files): + lines.append('--' + BOUNDARY) + lines.append('Content-Disposition: form-data; name="%s"' % key) + lines.append('') +- if isinstance(value, unicode): ++ if isinstance(value, str): + value = value.encode('utf-8') + lines.append(value) + +@@ -68,7 +68,7 @@ def _encode_multipart_form_data(fields, files): + % (key, filename)) + lines.append('Content-Type: %s' % get_mime_type(filename)) + lines.append('') +- if isinstance(value, unicode): ++ if isinstance(value, str): + value = value.encode('utf-8') + lines.append(value) + +@@ -103,9 +103,9 @@ class FileUploader(object): + # FIXME: Setting a timeout, either globally using socket.setdefaulttimeout() + # or in urlopen(), doesn't appear to work on Mac 10.5 with Python 2.7. + # For now we will ignore the timeout value and hope for the best. 
+- request = urllib2.Request(self._url, data, ++ request = urllib.request.Request(self._url, data, + {'Content-Type': content_type}) +- return urllib2.urlopen(request) ++ return urllib.request.urlopen(request) + + return NetworkTransaction( + timeout_seconds=self._timeout_seconds).run(callback) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/git_cl.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/git_cl.py +index 516231a0f..f8eb38e44 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/git_cl.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/git_cl.py +@@ -258,8 +258,8 @@ class GitCL(object): + """Returns the latest entries from from a Build to TryJobStatus dict.""" + if try_results is None: + return None +- latest_builds = filter_latest_builds(try_results.keys()) +- return {b: s for b, s in try_results.items() if b in latest_builds} ++ latest_builds = filter_latest_builds(list(try_results.keys())) ++ return {b: s for b, s in list(try_results.items()) if b in latest_builds} + + def try_job_results(self, + issue_number=None, +@@ -399,13 +399,13 @@ class GitCL(object): + + @staticmethod + def all_finished(try_results): +- return all(s.status == 'COMPLETED' for s in try_results.values()) ++ return all(s.status == 'COMPLETED' for s in list(try_results.values())) + + @staticmethod + def all_success(try_results): + return all(s.status == 'COMPLETED' and s.result == 'SUCCESS' +- for s in try_results.values()) ++ for s in list(try_results.values())) + + @staticmethod + def some_failed(try_results): +- return any(s.result == 'FAILURE' for s in try_results.values()) ++ return any(s.result == 'FAILURE' for s in list(try_results.values())) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/git_cl_mock.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/git_cl_mock.py +index 4c1875ff8..99020a626 100644 +--- 
a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/git_cl_mock.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/git_cl_mock.py +@@ -37,7 +37,7 @@ class MockGitCL(object): + def run(self, args): + self.calls.append(['git', 'cl'] + args) + arg_key = "".join(args) +- if self._git_error_output and arg_key in self._git_error_output.keys(): ++ if self._git_error_output and arg_key in list(self._git_error_output.keys()): + raise ScriptError(output=self._git_error_output[arg_key]) + return 'mock output' + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/network_transaction.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/network_transaction.py +index 52361192f..c71893997 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/network_transaction.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/network_transaction.py +@@ -28,7 +28,7 @@ + + import logging + import time +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + + _log = logging.getLogger(__name__) + +@@ -57,7 +57,7 @@ class NetworkTransaction(object): + while True: + try: + return request() +- except urllib2.HTTPError as error: ++ except urllib.error.HTTPError as error: + if self._return_none_on_404 and error.code == 404: + return None + self._check_for_timeout() +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/network_transaction_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/network_transaction_unittest.py +index b827cafa3..17cc72d29 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/network_transaction_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/network_transaction_unittest.py +@@ -26,7 +26,7 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF 
SUCH DAMAGE. + +-from urllib2 import HTTPError ++from urllib.error import HTTPError + from blinkpy.common.net.network_transaction import NetworkTransaction, NetworkTimeout + from blinkpy.common.system.log_testing import LoggingTestCase + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/results_fetcher.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/results_fetcher.py +index fb9fd6f97..66992c112 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/results_fetcher.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/results_fetcher.py +@@ -30,7 +30,7 @@ import collections + import logging + import json + import re +-import urllib ++import urllib.request, urllib.parse, urllib.error + + from blinkpy.common.memoized import memoized + from blinkpy.common.net.web import Web +@@ -81,7 +81,7 @@ class TestResultsFetcher(object): + Build(builder_name, build_number)) + if step_name: + return '%s/%s/%s/layout-test-results' % ( +- url_base, build_number, urllib.quote(step_name)) ++ url_base, build_number, urllib.parse.quote(step_name)) + return '%s/%s/layout-test-results' % (url_base, build_number) + return self.accumulated_results_url_base(builder_name) + +@@ -140,7 +140,7 @@ class TestResultsFetcher(object): + + url = '%s/testfile?%s' % ( + TEST_RESULTS_SERVER, +- urllib.urlencode({ ++ urllib.parse.urlencode({ + 'builder': build.builder_name, + 'buildnumber': build.build_number, + 'name': 'full_results.json', +@@ -195,7 +195,7 @@ class TestResultsFetcher(object): + + url = '%s/testfile?%s' % ( + TEST_RESULTS_SERVER, +- urllib.urlencode({ ++ urllib.parse.urlencode({ + 'builder': build.builder_name, + 'buildnumber': build.build_number, + 'name': 'full_results.json', +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/results_fetcher_test.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/results_fetcher_test.py +index 
3640e2854..26571d7c1 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/results_fetcher_test.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/results_fetcher_test.py +@@ -59,7 +59,7 @@ class BuilderTest(LoggingTestCase): + '/10/blink_web_tests%20%28with%20patch%29/layout-test-results') + + def test_results_url_with_non_numeric_build_number(self): +- with self.assertRaisesRegexp(AssertionError, ++ with self.assertRaisesRegex(AssertionError, + 'expected numeric build number'): + TestResultsFetcher().results_url('Test Builder', 'ba5eba11') + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/web.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/web.py +index ff76cf61d..eb3ec84b7 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/web.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/web.py +@@ -26,13 +26,13 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + + from blinkpy.common.net.network_transaction import NetworkTransaction + + + class Web(object): +- class _HTTPRedirectHandler2(urllib2.HTTPRedirectHandler): # pylint:disable=no-init ++ class _HTTPRedirectHandler2(urllib.request.HTTPRedirectHandler): # pylint:disable=no-init + """A subclass of HTTPRedirectHandler to support 308 Permanent Redirect.""" + + def http_error_308(self, req, fp, code, msg, headers): # pylint:disable=unused-argument +@@ -45,13 +45,13 @@ class Web(object): + lambda: self.request('GET', url).read()) + + def request(self, method, url, data=None, headers=None): +- opener = urllib2.build_opener(Web._HTTPRedirectHandler2) +- request = urllib2.Request(url=url, data=data) ++ opener = urllib.request.build_opener(Web._HTTPRedirectHandler2) ++ request = urllib.request.Request(url=url, data=data) + + request.get_method = lambda: method + + if headers: +- for key, value in headers.items(): ++ for key, value in list(headers.items()): + request.add_header(key, value) + + return opener.open(request) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/web_mock.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/web_mock.py +index bb0671ce7..05c8d923f 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/web_mock.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/web_mock.py +@@ -26,7 +26,7 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + + + class MockWeb(object): +@@ -55,7 +55,7 @@ class MockResponse(object): + self._info = MockInfo(values.get('headers', {})) + + if int(self.status_code) >= 400: +- raise urllib2.HTTPError( ++ raise urllib.error.HTTPError( + url=self.url, + code=self.status_code, + msg='Received error status code: {}'.format(self.status_code), +@@ -77,7 +77,7 @@ class MockInfo(object): + # The name of the headers (keys) are case-insensitive, and values are stripped. + self._headers = { + key.lower(): value.strip() +- for key, value in headers.iteritems() ++ for key, value in headers.items() + } + + def getheader(self, header): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/web_test_results.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/web_test_results.py +index 4e5421a85..893a6dcaf 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/web_test_results.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/net/web_test_results.py +@@ -39,7 +39,7 @@ class WebTestResult(object): + + def suffixes_for_test_result(self): + suffixes = set() +- artifact_names = self._result_dict.get('artifacts', {}).keys() ++ artifact_names = list(self._result_dict.get('artifacts', {}).keys()) + # Add extensions for mismatches. + if 'actual_text' in artifact_names: + suffixes.add('txt') +@@ -96,7 +96,7 @@ class WebTestResult(object): + baseline, including an implicit all-PASS testharness baseline (i.e. 
a + previously all-PASS testharness test starts to fail).""" + actual_results = self.actual_results().split(' ') +- artifact_names = self._result_dict.get('artifacts', {}).keys() ++ artifact_names = list(self._result_dict.get('artifacts', {}).keys()) + return ('FAIL' in actual_results and any( + artifact_name.startswith('actual') + for artifact_name in artifact_names) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/pretty_diff_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/pretty_diff_unittest.py +index 448aae691..4d72da6a6 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/pretty_diff_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/pretty_diff_unittest.py +@@ -21,7 +21,7 @@ class TestFileDiff(unittest.TestCase): + lines = [] + diff, remaining_lines = DiffFile.parse(lines) + self.assertIsNone(diff) +- self.assertEquals(remaining_lines, []) ++ self.assertEqual(remaining_lines, []) + + def test_100percent_similarity(self): + # crrev.com/c576df77d72abe47154ff2489bb035aa20892f7f +@@ -35,7 +35,7 @@ class TestFileDiff(unittest.TestCase): + ] + diff, remaining_lines = DiffFile.parse(lines) + self.assertIsNotNone(diff) +- self.assertEquals(remaining_lines[0], lines[4]) ++ self.assertEqual(remaining_lines[0], lines[4]) + + def test_emptify_text(self): + lines = [ +@@ -47,7 +47,7 @@ class TestFileDiff(unittest.TestCase): + ] + diff, remaining_lines = DiffFile.parse(lines) + self.assertIsNotNone(diff) +- self.assertEquals(remaining_lines, []) ++ self.assertEqual(remaining_lines, []) + self._assert_file_status(diff, 'M') + + def test_remove_text(self): +@@ -59,7 +59,7 @@ class TestFileDiff(unittest.TestCase): + ] + diff, remaining_lines = DiffFile.parse(lines) + self.assertIsNotNone(diff) +- self.assertEquals(remaining_lines, []) ++ self.assertEqual(remaining_lines, []) + self._assert_file_status(diff, 'D') + + def test_remove_zero_byte_text(self): +@@ -69,7 +69,7 @@ 
class TestFileDiff(unittest.TestCase): + ] + diff, remaining_lines = DiffFile.parse(lines) + self.assertIsNotNone(diff) +- self.assertEquals(remaining_lines, []) ++ self.assertEqual(remaining_lines, []) + self._assert_file_status(diff, 'D') + + def test_add_empty_text(self): +@@ -79,7 +79,7 @@ class TestFileDiff(unittest.TestCase): + ] + diff, remaining_lines = DiffFile.parse(lines) + self.assertIsNotNone(diff) +- self.assertEquals(remaining_lines, []) ++ self.assertEqual(remaining_lines, []) + self._assert_file_status(diff, 'A') + + def test_emptify_binary(self): +@@ -91,7 +91,7 @@ class TestFileDiff(unittest.TestCase): + ] + diff, remaining_lines = DiffFile.parse(lines) + self.assertIsNotNone(diff) +- self.assertEquals(remaining_lines, []) ++ self.assertEqual(remaining_lines, []) + self._assert_file_status(diff, 'M') + + def test_remove_binary(self): +@@ -104,7 +104,7 @@ class TestFileDiff(unittest.TestCase): + ] + diff, remaining_lines = DiffFile.parse(lines) + self.assertIsNotNone(diff) +- self.assertEquals(remaining_lines, []) ++ self.assertEqual(remaining_lines, []) + self._assert_file_status(diff, 'D') + + def test_add_binary(self): +@@ -117,44 +117,44 @@ class TestFileDiff(unittest.TestCase): + ] + diff, remaining_lines = DiffFile.parse(lines) + self.assertIsNotNone(diff) +- self.assertEquals(remaining_lines, []) ++ self.assertEqual(remaining_lines, []) + self._assert_file_status(diff, 'A') + + + class TestDiffHunk(unittest.TestCase): + def test_find_operations(self): +- self.assertEquals(DiffHunk._find_operations([]), []) +- self.assertEquals(DiffHunk._find_operations([' ']), []) ++ self.assertEqual(DiffHunk._find_operations([]), []) ++ self.assertEqual(DiffHunk._find_operations([' ']), []) + +- self.assertEquals(DiffHunk._find_operations(['-']), [([0], [])]) +- self.assertEquals( ++ self.assertEqual(DiffHunk._find_operations(['-']), [([0], [])]) ++ self.assertEqual( + DiffHunk._find_operations(['-', '-']), [([0, 1], [])]) +- self.assertEquals( ++ 
self.assertEqual( + DiffHunk._find_operations([' ', '-', '-']), [([1, 2], [])]) +- self.assertEquals( ++ self.assertEqual( + DiffHunk._find_operations(['-', '-', ' ']), [([0, 1], [])]) + +- self.assertEquals(DiffHunk._find_operations(['+']), [([], [0])]) +- self.assertEquals( ++ self.assertEqual(DiffHunk._find_operations(['+']), [([], [0])]) ++ self.assertEqual( + DiffHunk._find_operations(['+', '+']), [([], [0, 1])]) +- self.assertEquals( ++ self.assertEqual( + DiffHunk._find_operations([' ', '+', '+']), [([], [1, 2])]) +- self.assertEquals( ++ self.assertEqual( + DiffHunk._find_operations(['+', '+', ' ']), [([], [0, 1])]) + +- self.assertEquals(DiffHunk._find_operations(['-', '+']), [([0], [1])]) +- self.assertEquals( ++ self.assertEqual(DiffHunk._find_operations(['-', '+']), [([0], [1])]) ++ self.assertEqual( + DiffHunk._find_operations(['-', '-', '+', '+']), + [([0, 1], [2, 3])]) +- self.assertEquals( ++ self.assertEqual( + DiffHunk._find_operations([' ', '-', '-', '+']), [([1, 2], [3])]) +- self.assertEquals( ++ self.assertEqual( + DiffHunk._find_operations(['-', '-', '+', '+', ' ']), + [([0, 1], [2, 3])]) +- self.assertEquals( ++ self.assertEqual( + DiffHunk._find_operations(['-', '-', '+', '+', '-']), + [([0, 1], [2, 3]), ([4], [])]) +- self.assertEquals( ++ self.assertEqual( + DiffHunk._find_operations(['-', '+', '-', '+']), [([0], [1]), + ([2], [3])]) + +@@ -164,15 +164,15 @@ class TestDiffHunk(unittest.TestCase): + return annotations + + def test_annotate(self): +- self.assertEquals(self._annotate(['-abcdef'], 0, 2, 4), [[(2, 4)]]) +- self.assertEquals( ++ self.assertEqual(self._annotate(['-abcdef'], 0, 2, 4), [[(2, 4)]]) ++ self.assertEqual( + self._annotate(['-abcdef', '-ghi'], 0, 2, 6), [[(2, 6)], None]) +- self.assertEquals( ++ self.assertEqual( + self._annotate(['-abcdef', '-ghi'], 0, 2, 7), [[(2, 6)], [(0, 1)]]) +- self.assertEquals( ++ self.assertEqual( + self._annotate(['-abcdef', '-ghi', '-jkl'], 0, 2, 11), + [[(2, 6)], [(0, 3)], [(0, 2)]]) +- 
self.assertEquals( ++ self.assertEqual( + self._annotate(['+', '+abc', ' de'], 0, 0, 2), + [[(0, 0)], [(0, 2)], None]) + +@@ -187,7 +187,7 @@ class TestBinaryHunk(unittest.TestCase): + lines = ['literal 6', 'NcmZSh&&2%iKL7{~0|Ed5', '', 'literal 0...'] + binary, remaining_lines = BinaryHunk.parse(lines) + self.assertIsNotNone(binary) +- self.assertEquals(remaining_lines[0], lines[3]) ++ self.assertEqual(remaining_lines[0], lines[3]) + self.assertTrue( + 'data:image/png;base64,' in binary.prettify('image/png', 'add')) + +@@ -195,6 +195,6 @@ class TestBinaryHunk(unittest.TestCase): + lines = ['literal 6', 'NcmZSh&&2%iKL7{~0|Ed5', ''] + binary, remaining_lines = BinaryHunk.parse(lines) + self.assertIsNotNone(binary) +- self.assertEquals(remaining_lines, []) ++ self.assertEqual(remaining_lines, []) + self.assertTrue( + '\xe6\xf9\xd9\xcf\x00\x17\x93''' + ) + checksum = read_checksum_from_png.read_checksum(filehandle) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/executive.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/executive.py +index c71326429..246645642 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/executive.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/executive.py +@@ -72,7 +72,7 @@ class ScriptError(Exception): + self.cwd = cwd + + def message_with_output(self): +- return unicode(self) ++ return str(self) + + def command_name(self): + command_path = self.script_args +@@ -286,7 +286,7 @@ class Executive(object): + # See https://bugs.webkit.org/show_bug.cgi?id=37528 + # for an example of a regression caused by passing a unicode string directly. + # FIXME: We may need to encode differently on different platforms. 
+- if isinstance(user_input, unicode): ++ if isinstance(user_input, str): + user_input = user_input.encode(self._child_process_encoding()) + return (self.PIPE, user_input) + +@@ -297,7 +297,7 @@ class Executive(object): + args = self._stringify_args(args) + escaped_args = [] + for arg in args: +- if isinstance(arg, unicode): ++ if isinstance(arg, str): + # Escape any non-ascii characters for easy copy/paste + arg = arg.encode('unicode_escape') + # FIXME: Do we need to fix quotes here? +@@ -403,9 +403,9 @@ class Executive(object): + + def _stringify_args(self, args): + # Popen will throw an exception if args are non-strings (like int()) +- string_args = map(unicode, args) ++ string_args = list(map(str, args)) + # The Windows implementation of Popen cannot handle unicode strings. :( +- return map(self._encode_argument_if_needed, string_args) ++ return list(map(self._encode_argument_if_needed, string_args)) + + def popen(self, args, **kwargs): + assert not kwargs.get('shell') +@@ -427,7 +427,7 @@ class Executive(object): + + def map(self, thunk, arglist, processes=None): + if sys.platform == 'win32' or len(arglist) == 1: +- return map(thunk, arglist) ++ return list(map(thunk, arglist)) + pool = multiprocessing.Pool( + processes=(processes or multiprocessing.cpu_count())) + try: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/executive_mock.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/executive_mock.py +index e9820aa01..ac38adea4 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/executive_mock.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/executive_mock.py +@@ -29,7 +29,7 @@ + import collections + import logging + import os +-import StringIO ++import io + + from blinkpy.common.system.executive import ScriptError + +@@ -39,9 +39,9 @@ _log = logging.getLogger(__name__) + class MockProcess(object): + def __init__(self, stdout='MOCK STDOUT\n', 
stderr='', returncode=0): + self.pid = 42 +- self.stdout = StringIO.StringIO(stdout) +- self.stderr = StringIO.StringIO(stderr) +- self.stdin = StringIO.StringIO() ++ self.stdout = io.StringIO(stdout) ++ self.stderr = io.StringIO(stderr) ++ self.stdin = io.StringIO() + self.returncode = returncode + + def wait(self): +@@ -101,11 +101,11 @@ class MockExecutive(object): + self.full_calls.append(MockCall(args=args, kwargs=kwargs)) + + def check_running_pid(self, pid): +- return pid in self._running_pids.values() ++ return pid in list(self._running_pids.values()) + + def running_pids(self, process_name_filter): + running_pids = [] +- for process_name, process_pid in self._running_pids.iteritems(): ++ for process_name, process_pid in self._running_pids.items(): + if process_name_filter(process_name): + running_pids.append(process_pid) + +@@ -113,7 +113,7 @@ class MockExecutive(object): + return running_pids + + def command_for_printing(self, args): +- string_args = map(unicode, args) ++ string_args = list(map(str, args)) + return ' '.join(string_args) + + # The argument list should match Executive.run_command, even if +@@ -166,7 +166,7 @@ class MockExecutive(object): + output = self._output + if return_stderr: + output += self._stderr +- if decode_output and not isinstance(output, unicode): ++ if decode_output and not isinstance(output, str): + output = output.decode('utf-8') + + return output +@@ -181,7 +181,7 @@ class MockExecutive(object): + pass + + def popen(self, args, cwd=None, env=None, **_): +- assert all(isinstance(arg, basestring) for arg in args) ++ assert all(isinstance(arg, str) for arg in args) + self._append_call(args, cwd=cwd, env=env) + if self._should_log: + cwd_string = '' +@@ -199,7 +199,7 @@ class MockExecutive(object): + return self._proc + + def call(self, args, **_): +- assert all(isinstance(arg, basestring) for arg in args) ++ assert all(isinstance(arg, str) for arg in args) + self._append_call(args) + _log.info('Mock call: %s', args) + +@@ 
-209,7 +209,7 @@ class MockExecutive(object): + num_previous_calls = len(self.full_calls) + command_outputs = [] + for cmd_line, cwd in commands: +- assert all(isinstance(arg, basestring) for arg in cmd_line) ++ assert all(isinstance(arg, str) for arg in cmd_line) + command_outputs.append( + [0, self.run_command(cmd_line, cwd=cwd), '']) + +@@ -219,7 +219,7 @@ class MockExecutive(object): + return command_outputs + + def map(self, thunk, arglist, processes=None): +- return map(thunk, arglist) ++ return list(map(thunk, arglist)) + + @property + def calls(self): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/executive_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/executive_unittest.py +index ef631b754..c6c5cc05f 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/executive_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/executive_unittest.py +@@ -100,7 +100,7 @@ class ExecutiveTest(unittest.TestCase): + with self.assertRaises(AssertionError): + executive.run_command('echo') + with self.assertRaises(AssertionError): +- executive.run_command(u'echo') ++ executive.run_command('echo') + executive.run_command(command_line('echo', 'foo')) + executive.run_command(tuple(command_line('echo', 'foo'))) + +@@ -121,7 +121,7 @@ class ExecutiveTest(unittest.TestCase): + to Executive.run* methods, and they will return unicode() + objects by default unless decode_output=False + """ +- unicode_tor_input = u"WebKit \u2661 Tor Arne Vestb\u00F8!" ++ unicode_tor_input = "WebKit \u2661 Tor Arne Vestb\u00F8!" 
+ if sys.platform == 'win32': + encoding = 'mbcs' + else: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem.py +index d4642cc80..36617b69e 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem.py +@@ -75,7 +75,7 @@ class FileSystem(object): + """ + if sys.platform == 'win32' and len(path) >= self.WINDOWS_MAX_PATH: + assert not path.startswith(r'\\'), "must not already be UNC" +- return ur'\\?\%s' % (self.abspath(path), ) ++ return r'\\?\%s' % (self.abspath(path), ) + return path + + def abspath(self, path): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem_mock.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem_mock.py +index 9e9dc7bc5..b316c432c 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem_mock.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem_mock.py +@@ -30,7 +30,7 @@ import errno + import hashlib + import os + import re +-import StringIO ++import io + import unittest + + from blinkpy.common.system.filesystem import _remove_contents, _sanitize_filename +@@ -178,11 +178,11 @@ class MockFileSystem(object): + + # We could use fnmatch.fnmatch, but that might not do the right thing on Windows. 
+ existing_files = [ +- path for path, contents in self.files.items() ++ path for path, contents in list(self.files.items()) + if contents is not None + ] +- return filter(path_filter, existing_files) + filter( +- path_filter, self.dirs) ++ return list(filter(path_filter, existing_files)) + list(filter( ++ path_filter, self.dirs)) + + def isabs(self, path): + return path.startswith(self.sep) +@@ -524,7 +524,7 @@ class ReadableBinaryFileObject(object): + class ReadableTextFileObject(ReadableBinaryFileObject): + def __init__(self, fs, path, data): + super(ReadableTextFileObject, self).__init__( +- fs, path, StringIO.StringIO(data.decode('utf-8'))) ++ fs, path, io.StringIO(data.decode('utf-8'))) + + def close(self): + self.data.close() +@@ -542,8 +542,8 @@ class ReadableTextFileObject(ReadableBinaryFileObject): + def __iter__(self): + return self.data.__iter__() + +- def next(self): +- return self.data.next() ++ def __next__(self): ++ return next(self.data) + + def seek(self, offset, whence=os.SEEK_SET): + self.data.seek(offset, whence) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem_mock_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem_mock_unittest.py +index 20fb36327..6b2c13b64 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem_mock_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem_mock_unittest.py +@@ -111,7 +111,7 @@ class MockFileSystemTest(unittest.TestCase, + mock_files = {'foo/bar/baz': '', 'foo/a': '', 'foo/b': '', 'foo/c': ''} + host = MockHost() + host.filesystem = MockFileSystem(files=mock_files) +- self.assertEquals( ++ self.assertEqual( + host.filesystem.walk(mock_dir), [('foo', ['bar'], ['a', 'b', 'c']), + ('foo/bar', [], ['baz'])]) + +@@ -128,7 +128,7 @@ class MockFileSystemTest(unittest.TestCase, + } + host = MockHost() + host.filesystem = MockFileSystem(files=mock_files) +- 
self.assertEquals( ++ self.assertEqual( + host.filesystem.walk(mock_dir), [('foo', ['a', 'bar'], ['c', 'b']), + ('foo/a', ['z'], ['x', 'y']), + ('foo/a/z', [], ['lyrics']), +@@ -145,4 +145,4 @@ class MockFileSystemTest(unittest.TestCase, + mock_files = {'foo': '', 'bar': '', 'a': ''} + filesystem = MockFileSystem(files=mock_files) + filesystem.make_executable('foo') +- self.assertEquals(filesystem.executable_files, set(['foo'])) ++ self.assertEqual(filesystem.executable_files, set(['foo'])) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem_unittest.py +index 51987ed65..06967d9b5 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/filesystem_unittest.py +@@ -223,7 +223,7 @@ class RealFileSystemTest(unittest.TestCase, GenericFileSystemTests): + with fs.mkdtemp(prefix='filesystem_unittest_') as d: + self.assertEqual(fs.listdir(d), []) + new_file = os.path.join(d, 'foo') +- fs.write_text_file(new_file, u'foo') ++ fs.write_text_file(new_file, 'foo') + self.assertEqual(fs.listdir(d), ['foo']) + os.remove(new_file) + +@@ -232,7 +232,7 @@ class RealFileSystemTest(unittest.TestCase, GenericFileSystemTests): + with fs.mkdtemp(prefix='filesystem_unittest_') as d: + self.assertEqual(list(fs.walk(d)), [(d, [], [])]) + new_file = os.path.join(d, 'foo') +- fs.write_text_file(new_file, u'foo') ++ fs.write_text_file(new_file, 'foo') + self.assertEqual(list(fs.walk(d)), [(d, [], ['foo'])]) + os.remove(new_file) + +@@ -284,7 +284,7 @@ class RealFileSystemTest(unittest.TestCase, GenericFileSystemTests): + fs = FileSystem() + text_path = None + +- unicode_text_string = u'\u016An\u012Dc\u014Dde\u033D' ++ unicode_text_string = '\u016An\u012Dc\u014Dde\u033D' + try: + text_path = tempfile.mktemp(prefix='tree_unittest_') + file = 
fs.open_text_file_for_writing(text_path) +@@ -305,7 +305,7 @@ class RealFileSystemTest(unittest.TestCase, GenericFileSystemTests): + text_path = None + binary_path = None + +- unicode_text_string = u'\u016An\u012Dc\u014Dde\u033D' ++ unicode_text_string = '\u016An\u012Dc\u014Dde\u033D' + hex_equivalent = '\xC5\xAA\x6E\xC4\xAD\x63\xC5\x8D\x64\x65\xCC\xBD' + try: + text_path = tempfile.mktemp(prefix='tree_unittest_') +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/log_utils.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/log_utils.py +index 6d3896155..d166876e3 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/log_utils.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/log_utils.py +@@ -99,7 +99,7 @@ def configure_logging(logging_level=None, + # + # Traceback (most recent call last): + # File "/System/Library/Frameworks/Python.framework/Versions/2.6/... +- # lib/python2.6/logging/__init__.py", line 761, in emit ++ # lib/python3.6/logging/__init__.py", line 761, in emit + # self.stream.write(fs % msg.encode(self.stream.encoding)) + # LookupError: unknown encoding: unknown + if logging_level is None: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/output_capture.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/output_capture.py +index 4dfbf0137..3bf0a12c1 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/output_capture.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/output_capture.py +@@ -31,7 +31,7 @@ + import logging + import sys + +-from StringIO import StringIO ++from io import StringIO + + + class OutputCapture(object): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/path.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/path.py +index 560807cba..16fd4af81 100644 +--- 
a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/path.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/path.py +@@ -27,7 +27,7 @@ + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + """Generic routines to convert platform-specific paths to URIs.""" + +-import urllib ++import urllib.request, urllib.parse, urllib.error + + + def abspath_to_uri(platform, path): +@@ -41,7 +41,7 @@ def _escape(path): + # when converting filenames to files. Instead of using urllib's default + # rules, we allow a small list of other characters through un-escaped. + # It's unclear if this is the best possible solution. +- return urllib.quote(path, safe='/+:') ++ return urllib.parse.quote(path, safe='/+:') + + + def _convert_path(platform, path): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/platform_info.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/platform_info.py +index 908902674..3e833cf6a 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/platform_info.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/platform_info.py +@@ -95,7 +95,7 @@ class PlatformInfo(object): + + def total_bytes_memory(self): + if self.is_mac(): +- return long( ++ return int( + self._executive.run_command(['sysctl', '-n', 'hw.memsize'])) + return None + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/platform_info_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/platform_info_unittest.py +index 82d32d240..c459197be 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/platform_info_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/platform_info_unittest.py +@@ -84,9 +84,9 @@ class TestPlatformInfo(unittest.TestCase): + def test_real_code(self): + # This test makes sure the real (unmocked) 
code actually works. + info = PlatformInfo(sys, platform, FileSystem(), Executive()) +- self.assertNotEquals(info.os_name, '') +- self.assertNotEquals(info.os_version, '') +- self.assertNotEquals(info.display_name(), '') ++ self.assertNotEqual(info.os_name, '') ++ self.assertNotEqual(info.os_version, '') ++ self.assertNotEqual(info.display_name(), '') + self.assertTrue(info.is_mac() or info.is_win() or info.is_linux() + or info.is_freebsd()) + self.assertIsNotNone(info.terminal_width()) +@@ -239,16 +239,16 @@ class TestPlatformInfo(unittest.TestCase): + + def test_display_name(self): + info = self.make_info(fake_sys('darwin')) +- self.assertNotEquals(info.display_name(), '') ++ self.assertNotEqual(info.display_name(), '') + + info = self.make_info(fake_sys('win32', tuple([6, 1, 7600]))) +- self.assertNotEquals(info.display_name(), '') ++ self.assertNotEqual(info.display_name(), '') + + info = self.make_info(fake_sys('linux2')) +- self.assertNotEquals(info.display_name(), '') ++ self.assertNotEqual(info.display_name(), '') + + info = self.make_info(fake_sys('freebsd9')) +- self.assertNotEquals(info.display_name(), '') ++ self.assertNotEqual(info.display_name(), '') + + def test_total_bytes_memory(self): + info = self.make_info( +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/profiler.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/profiler.py +index fa1580b95..1226628b8 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/profiler.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/profiler.py +@@ -44,7 +44,7 @@ class ProfilerFactory(object): + profiler_name = profiler_name or cls.default_profiler_name( + host.platform) + profiler_class = next( +- itertools.ifilter(lambda profiler: profiler.name == profiler_name, ++ filter(lambda profiler: profiler.name == profiler_name, + profilers), None) + if not profiler_class: + return None +@@ -143,7 +143,7 @@ 
class GooglePProf(SingleFileOutputProfiler): + def profile_after_exit(self): + # google-pprof doesn't check its arguments, so we have to. + if not self._host.filesystem.exists(self._output_path): +- print 'Failed to gather profile, %s does not exist.' % self._output_path ++ print('Failed to gather profile, %s does not exist.' % self._output_path) + return + + pprof_args = [ +@@ -151,13 +151,13 @@ class GooglePProf(SingleFileOutputProfiler): + self._output_path + ] + profile_text = self._host.executive.run_command(pprof_args) +- print 'First 10 lines of pprof --text:' +- print self._first_ten_lines_of_profile(profile_text) +- print 'http://google-perftools.googlecode.com/svn/trunk/doc/cpuprofile.html documents output.' +- print +- print 'To interact with the the full profile, including produce graphs:' +- print ' '.join( +- [self._pprof_path(), self._executable_path, self._output_path]) ++ print('First 10 lines of pprof --text:') ++ print(self._first_ten_lines_of_profile(profile_text)) ++ print('http://google-perftools.googlecode.com/svn/trunk/doc/cpuprofile.html documents output.') ++ print() ++ print('To interact with the the full profile, including produce graphs:') ++ print(' '.join( ++ [self._pprof_path(), self._executable_path, self._output_path])) + + + class Perf(SingleFileOutputProfiler): +@@ -199,22 +199,22 @@ class Perf(SingleFileOutputProfiler): + perf_exitcode = self._perf_process.wait() + # The exit code should always be -2, as we're always interrupting perf. 
+ if perf_exitcode not in (0, -2): +- print "'perf record' failed (exit code: %i), can't process results:" % perf_exitcode ++ print("'perf record' failed (exit code: %i), can't process results:" % perf_exitcode) + return + + perf_args = [ + self._perf_path(), 'report', '--call-graph', 'none', '--input', + self._output_path + ] +- print "First 10 lines of 'perf report --call-graph=none':" ++ print("First 10 lines of 'perf report --call-graph=none':") + +- print ' '.join(perf_args) ++ print(' '.join(perf_args)) + perf_output = self._host.executive.run_command(perf_args) +- print self._first_ten_lines_of_profile(perf_output) ++ print(self._first_ten_lines_of_profile(perf_output)) + +- print 'To view the full profile, run:' +- print ' '.join([self._perf_path(), 'report', '-i', self._output_path]) +- print # An extra line between tests looks nicer. ++ print('To view the full profile, run:') ++ print(' '.join([self._perf_path(), 'report', '-i', self._output_path])) ++ print() # An extra line between tests looks nicer. + + + class Sample(SingleFileOutputProfiler): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/stack_utils.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/stack_utils.py +index 3ad89f15e..f4f22a236 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/stack_utils.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/stack_utils.py +@@ -45,7 +45,7 @@ def _find_thread_stack(thread_id): + """Returns a stack object that can be used to dump a stack trace for + the given thread id (or None if the id is not found). 
+ """ +- for tid, stack in sys._current_frames().items(): ++ for tid, stack in list(sys._current_frames().items()): + if tid == thread_id: + return stack + return None +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/stack_utils_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/stack_utils_unittest.py +index 6d8474f43..efbdfa42d 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/stack_utils_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/stack_utils_unittest.py +@@ -33,7 +33,7 @@ from blinkpy.common.system import stack_utils + + + def current_thread_id(): +- thread_id, _ = sys._current_frames().items()[0] ++ thread_id, _ = list(sys._current_frames().items())[0] + return thread_id + + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/system_host_mock.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/system_host_mock.py +index b28baf5e2..9d526200e 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/system_host_mock.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/system_host_mock.py +@@ -26,7 +26,7 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +-from StringIO import StringIO ++from io import StringIO + + from blinkpy.common.system.executive_mock import MockExecutive + from blinkpy.common.system.filesystem_mock import MockFileSystem +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/user.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/user.py +index e6cd7f96a..bcc10baa2 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/user.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/system/user.py +@@ -75,7 +75,7 @@ class User(object): + for value in re.split(r"\s*,\s*", response): + parts = value.split('-') + if len(parts) == 2: +- indices += range(int(parts[0]) - 1, int(parts[1])) ++ indices += list(range(int(parts[0]) - 1, int(parts[1]))) + else: + indices.append(int(value) - 1) + except ValueError: +@@ -97,11 +97,11 @@ class User(object): + list_items, + can_choose_multiple=False, + input_func=raw_input): +- print list_title ++ print(list_title) + i = 0 + for item in list_items: + i += 1 +- print '%2d. %s' % (i, item) ++ print('%2d. 
%s' % (i, item)) + return cls._wait_on_list_response(list_items, can_choose_multiple, + input_func) + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/unified_diff.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/unified_diff.py +index 947748c9c..b731d9c8b 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/unified_diff.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/unified_diff.py +@@ -25,7 +25,7 @@ def unified_diff(expected_text, actual_text, expected_filename, + + + def _to_raw_bytes(string_value): +- if isinstance(string_value, unicode): ++ if isinstance(string_value, str): + return string_value.encode('utf-8') + return string_value + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/unified_diff_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/unified_diff_unittest.py +index 33392a543..18c32d249 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/unified_diff_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/common/unified_diff_unittest.py +@@ -25,17 +25,17 @@ class TestUnifiedDiff(unittest.TestCase): + # filenames are unicode, with regular or malformed input (expected or + # actual input is always raw bytes, not unicode). 
+ unified_diff('exp', 'act', 'exp.txt', 'act.txt') +- unified_diff('exp', 'act', u'exp.txt', 'act.txt') +- unified_diff('exp', 'act', u'a\xac\u1234\u20ac\U00008000', 'act.txt') ++ unified_diff('exp', 'act', 'exp.txt', 'act.txt') ++ unified_diff('exp', 'act', 'a\xac\u1234\u20ac\U00008000', 'act.txt') + + def test_unified_diff_handles_non_ascii_chars(self): + unified_diff('exp' + chr(255), 'act', 'exp.txt', 'act.txt') +- unified_diff('exp' + chr(255), 'act', u'exp.txt', 'act.txt') ++ unified_diff('exp' + chr(255), 'act', 'exp.txt', 'act.txt') + + def test_unified_diff_handles_unicode_inputs(self): + # Though expected and actual files should always be read in with no + # encoding (and be stored as str objects), test unicode inputs just to + # be safe. +- unified_diff(u'exp', 'act', 'exp.txt', 'act.txt') +- unified_diff(u'a\xac\u1234\u20ac\U00008000', 'act', 'exp.txt', ++ unified_diff('exp', 'act', 'exp.txt', 'act.txt') ++ unified_diff('a\xac\u1234\u20ac\U00008000', 'act', 'exp.txt', + 'act.txt') +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/presubmit/audit_non_blink_usage.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/presubmit/audit_non_blink_usage.py +index fe8b5e5d6..639e180f7 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/presubmit/audit_non_blink_usage.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/presubmit/audit_non_blink_usage.py +@@ -1366,11 +1366,11 @@ def main(): + path, + [(i + 1, l) for i, l in enumerate(contents.splitlines())]) + if disallowed_identifiers: +- print '%s uses disallowed identifiers:' % path ++ print('%s uses disallowed identifiers:' % path) + for i in disallowed_identifiers: + print(i.line, i.identifier, i.advice) + except IOError as e: +- print 'could not open %s: %s' % (path, e) ++ print('could not open %s: %s' % (path, e)) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checker.py 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checker.py +index 8d1d82c79..51cc90696 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checker.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checker.py +@@ -325,7 +325,7 @@ def configure_logging(stream, logger=None, is_verbose=False): + # + # Traceback (most recent call last): + # File "/System/Library/Frameworks/Python.framework/Versions/2.6/... +- # lib/python2.6/logging/__init__.py", line 761, in emit ++ # lib/python3.6/logging/__init__.py", line 761, in emit + # self.stream.write(fs % msg.encode(self.stream.encoding)) + # LookupError: unknown encoding: unknown + if logger is None: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checker_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checker_unittest.py +index fc987c919..549f35fb6 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checker_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checker_unittest.py +@@ -215,7 +215,7 @@ class GlobalVariablesTest(unittest.TestCase): + def test_max_reports_per_category(self): + """Check that _MAX_REPORTS_PER_CATEGORY is valid.""" + all_categories = self._all_categories() +- for category in _MAX_REPORTS_PER_CATEGORY.iterkeys(): ++ for category in _MAX_REPORTS_PER_CATEGORY.keys(): + self.assertIn(category, all_categories, + 'Key "%s" is not a category' % category) + +@@ -286,7 +286,7 @@ class CheckerDispatcherCarriageReturnTest(unittest.TestCase): + } + + dispatcher = CheckerDispatcher() +- for file_path, expected_result in files.items(): ++ for file_path, expected_result in list(files.items()): + self.assertEqual( + dispatcher.should_check_and_strip_carriage_returns(file_path), + expected_result, 'Checking: %s' % file_path) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/cpp.py 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/cpp.py +index 0cc3758ca..b4156b910 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/cpp.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/cpp.py +@@ -862,7 +862,7 @@ def check_for_copyright(lines, error): + + # We'll say it should occur by line 10. Don't forget there's a + # dummy line at the front. +- for line in xrange(1, min(len(lines), 11)): ++ for line in range(1, min(len(lines), 11)): + if re.search(r'Copyright', lines[line], re.I): + break + else: # means no copyright line was found +@@ -946,7 +946,7 @@ def check_for_unicode_replacement_characters(lines, error): + error: The function to call with any errors found. + """ + for line_number, line in enumerate(lines): +- if u'\ufffd' in line: ++ if '\ufffd' in line: + error( + line_number, 'readability/utf8', 5, + 'Line contains invalid UTF-8 (or Unicode replacement character).' +@@ -1318,7 +1318,7 @@ def detect_functions(clean_lines, line_number, function_state, error): + return + + joined_line = '' +- for start_line_number in xrange(line_number, clean_lines.num_lines()): ++ for start_line_number in range(line_number, clean_lines.num_lines()): + start_line = clean_lines.elided[start_line_number] + joined_line += ' ' + start_line.lstrip() + body_match = search(r'{|;', start_line) +@@ -1578,7 +1578,7 @@ def get_line_width(line): + The width of the line in column positions, accounting for Unicode + combining characters and wide characters. + """ +- if isinstance(line, unicode): ++ if isinstance(line, str): + width = 0 + for c in unicodedata.normalize('NFC', line): + if unicodedata.east_asian_width(c) in ('W', 'F'): +@@ -1751,7 +1751,7 @@ def check_redundant_virtual(clean_lines, linenum, error): + # that this is rare. 
+ end_position = Position(-1, -1) + start_col = len(virtual.group(2)) +- for start_line in xrange(linenum, min(linenum + 3, ++ for start_line in range(linenum, min(linenum + 3, + clean_lines.num_lines())): + line = clean_lines.elided[start_line][start_col:] + parameter_list = match(r'^([^(]*)\(', line) +@@ -1768,7 +1768,7 @@ def check_redundant_virtual(clean_lines, linenum, error): + + # Look for "override" or "final" after the parameter list + # (possibly on the next few lines). +- for i in xrange(end_position.row, ++ for i in range(end_position.row, + min(end_position.row + 3, clean_lines.num_lines())): + line = clean_lines.elided[i][end_position.column:] + override_or_final = search(r'\b(override|final)\b', line) +@@ -2194,7 +2194,7 @@ def check_for_toFoo_definition(filename, pattern, error): + def grep(lines, pattern, error): + matches = [] + function_state = None +- for line_number in xrange(lines.num_lines()): ++ for line_number in range(lines.num_lines()): + line = (lines.elided[line_number]).rstrip() + try: + if pattern in line: +@@ -2566,7 +2566,7 @@ def check_for_include_what_you_use(filename, clean_lines, include_state, + required = {} + # Example of required: { '': (1219, 'less<>') } + +- for line_number in xrange(clean_lines.num_lines()): ++ for line_number in range(clean_lines.num_lines()): + line = clean_lines.elided[line_number] + if not line or line[0] == '#': + continue +@@ -2609,7 +2609,7 @@ def check_for_include_what_you_use(filename, clean_lines, include_state, + + # include_state is modified during iteration, so we iterate over a copy of + # the keys. 
+- for header in include_state.keys(): # NOLINT ++ for header in list(include_state.keys()): # NOLINT + (same_module, common_path) = files_belong_to_same_module( + abs_filename, header) + fullpath = common_path + header +@@ -2703,7 +2703,7 @@ def _process_lines(filename, file_extension, lines, error, min_confidence): + check_for_header_guard(filename, clean_lines, error) + + file_state = _FileState(clean_lines, file_extension) +- for line in xrange(clean_lines.num_lines()): ++ for line in range(clean_lines.num_lines()): + process_line(filename, file_extension, clean_lines, line, + include_state, function_state, class_state, file_state, + error) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/cpp_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/cpp_unittest.py +index f410f9dea..06f0fc1c1 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/cpp_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/cpp_unittest.py +@@ -469,8 +469,8 @@ class CppStyleTest(CppStyleTestBase): + # Test get line width. + def test_get_line_width(self): + self.assertEqual(0, cpp_style.get_line_width('')) +- self.assertEqual(10, cpp_style.get_line_width(u'x' * 10)) +- self.assertEqual(16, cpp_style.get_line_width(u'都|道|府|県|支庁')) ++ self.assertEqual(10, cpp_style.get_line_width('x' * 10)) ++ self.assertEqual(16, cpp_style.get_line_width('都|道|府|県|支庁')) + + def test_find_next_multi_line_comment_start(self): + self.assertEqual(1, +@@ -1362,7 +1362,7 @@ class CppStyleTest(CppStyleTestBase): + error_collector = ErrorCollector(self.assertTrue) + self.process_file_data( + 'foo.cpp', 'cpp', +- unicode(raw_bytes, 'utf8', 'replace').split('\n'), ++ str(raw_bytes, 'utf8', 'replace').split('\n'), + error_collector) + # The warning appears only once. 
+ self.assertEqual( +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/python_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/python_unittest.py +index 106b81901..ae8a02f26 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/python_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/python_unittest.py +@@ -63,6 +63,6 @@ class PythonCheckerTest(unittest.TestCase): + (2, 'pylint/C0303(trailing-whitespace)', 5, + '[] Trailing whitespace'), + (2, 'pylint/E0602(undefined-variable)', 5, +- u"[] Undefined variable 'error'"), ++ "[] Undefined variable 'error'"), + (3, 'pylint/W0611(unused-import)', 5, '[] Unused import math'), + ], errors) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/python_unittest_input.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/python_unittest_input.py +index 239242c74..bc017e019 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/python_unittest_input.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/python_unittest_input.py +@@ -1,3 +1,3 @@ + # This file is sample input for python_unittest.py and includes problems. 
+-print error() ++print(error()) + import math#unused +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/test_expectations_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/test_expectations_unittest.py +index 35a613103..46a34dcfc 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/test_expectations_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/test_expectations_unittest.py +@@ -85,7 +85,7 @@ class TestExpectationsTestCase(unittest.TestCase): + self.assertEqual(expected_output, + self._error_collector.get_errors()) + else: +- self.assertNotEquals('', self._error_collector.get_errors()) ++ self.assertNotEqual('', self._error_collector.get_errors()) + + # Note that a patch might change a line that introduces errors elsewhere, but we + # don't want to lint the whole file (it can unfairly punish patches for pre-existing errors). +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/xml.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/xml.py +index 079c5ca6d..5aa8914e8 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/xml.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/checkers/xml.py +@@ -21,7 +21,7 @@ + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ """Checks WebKit style for XML files.""" + +-from __future__ import absolute_import ++ + + from xml.parsers import expat + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/filter.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/filter.py +index f1e62da94..46c6f2aac 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/filter.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/filter.py +@@ -192,7 +192,7 @@ class FilterConfiguration(object): + if self._path_specific_lower is None: + self._path_specific_lower = [] + for (sub_paths, path_rules) in self._path_specific: +- sub_paths = map(str.lower, sub_paths) ++ sub_paths = list(map(str.lower, sub_paths)) + self._path_specific_lower.append((sub_paths, path_rules)) + return self._path_specific_lower + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/patchreader.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/patchreader.py +index 434f6dcc3..56011d8d7 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/patchreader.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/style/patchreader.py +@@ -50,7 +50,7 @@ class PatchReader(object): + """Checks style in the given patch.""" + patch_files = DiffParser(patch_string.splitlines()).files + +- for path, diff_file in patch_files.iteritems(): ++ for path, diff_file in patch_files.items(): + line_numbers = diff_file.added_or_modified_line_numbers() + _log.debug('Found %s new or modified lines in: %s', + len(line_numbers), path) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/pep8.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/pep8.py +index f605f189f..b22c5311b 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/pep8.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/pep8.py +@@ -44,7 +44,7 @@ W warnings + 700 statements 
+ 900 syntax error + """ +-from __future__ import with_statement ++ + + __version__ = '1.5.7' + +@@ -61,7 +61,7 @@ try: + from configparser import RawConfigParser + from io import TextIOWrapper + except ImportError: +- from ConfigParser import RawConfigParser ++ from configparser import RawConfigParser + + DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git,__pycache__' + DEFAULT_IGNORE = 'E123,E226,E24' +@@ -1122,14 +1122,14 @@ def parse_udiff(diff, patterns=None, parent='.'): + if line[:3] == '@@ ': + hunk_match = HUNK_REGEX.match(line) + (row, nrows) = [int(g or '1') for g in hunk_match.groups()] +- rv[path].update(range(row, row + nrows)) ++ rv[path].update(list(range(row, row + nrows))) + elif line[:3] == '+++': + path = line[4:].split('\t', 1)[0] + if path[:2] == 'b/': + path = path[2:] + rv[path] = set() + return dict([(os.path.join(parent, path), rows) +- for (path, rows) in rv.items() ++ for (path, rows) in list(rv.items()) + if rows and filename_match(path, patterns)]) + + +@@ -1740,7 +1740,7 @@ class StyleGuide(object): + starts with argument_name and which contain selected tests. + """ + checks = [] +- for check, attrs in _checks[argument_name].items(): ++ for check, attrs in list(_checks[argument_name].items()): + (codes, args) = attrs + if any(not (code and self.ignore_code(code)) for code in codes): + checks.append((check.__name__, check, args)) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/update_certs.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/update_certs.py +index 1a9dc17ed..8eb7a2c9d 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/update_certs.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/update_certs.py +@@ -18,14 +18,13 @@ _DOMAIN = '127.0.0.1' + def main(): + cert_dir = os.path.join(_THIS_DIR, 'certs') + +- print '===> Removing old files...' +- old_files = filter(lambda filename: '.sxg.' 
not in filename, +- os.listdir(cert_dir)) ++ print('===> Removing old files...') ++ old_files = [filename for filename in os.listdir(cert_dir) if '.sxg.' not in filename] + old_files = [os.path.join(cert_dir, fn) for fn in old_files] + if subprocess.call(['git', 'rm'] + old_files) != 0: + sys.exit(1) + +- print '\n===> Regenerating keys and certificates...' ++ print('\n===> Regenerating keys and certificates...') + env = OpenSSLEnvironment(logging.getLogger(__name__), + base_path=cert_dir, + force_regenerate=True, +@@ -42,7 +41,7 @@ def main(): + if subprocess.call('git add -v ' + os.path.join(cert_dir, '*'), shell=True) != 0: + sys.exit(1) + +- print '\n===> Updating wpt.config.json and base.py...' ++ print('\n===> Updating wpt.config.json and base.py...') + key_basename = os.path.basename(key_path) + pem_basename = os.path.basename(pem_path) + config_path = os.path.join(_THIS_DIR, 'wpt.config.json') +@@ -65,7 +64,7 @@ def main(): + if subprocess.call(['git', 'add', '-v', config_path, base_py_path]) != 0: + sys.exit(1) + +- print '\n===> Certificate validity:' ++ print('\n===> Certificate validity:') + subprocess.call(['grep', 'Not After', pem_path]) + + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/fnmatch.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/fnmatch.py +index 0c45029b2..d56cbea5c 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/fnmatch.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/fnmatch.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import ++ + + import fnmatch as _stdlib_fnmatch + import os +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/lint.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/lint.py +index 5027da705..c0bce9d4e 100644 +--- 
a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/lint.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/lint.py +@@ -1,4 +1,4 @@ +-from __future__ import print_function, unicode_literals ++ + + import abc + import argparse +@@ -294,23 +294,23 @@ def check_css_globally_unique(repo_root, paths): + + for path in paths: + if os.name == "nt": +- path = path.replace(u"\\", u"/") ++ path = path.replace("\\", "/") + +- if not path.startswith(u"css/"): ++ if not path.startswith("css/"): + continue + +- source_file = SourceFile(repo_root, path, u"/") ++ source_file = SourceFile(repo_root, path, "/") + if source_file.name_is_non_test: + # If we're name_is_non_test for a reason apart from support, ignore it. + # We care about support because of the requirement all support files in css/ to be in + # a support directory; see the start of check_parsed. +- offset = path.find(u"/support/") ++ offset = path.find("/support/") + if offset == -1: + continue + + parts = source_file.dir_path.split(os.path.sep) + if (parts[0] in source_file.root_dir_non_test or +- any(item in source_file.dir_non_test - {u"support"} for item in parts) or ++ any(item in source_file.dir_non_test - {"support"} for item in parts) or + any(parts[:len(non_test_path)] == list(non_test_path) for non_test_path in source_file.dir_path_non_test)): + continue + +@@ -320,7 +320,7 @@ def check_css_globally_unique(repo_root, paths): + ref_files[source_file.name].add(path) + else: + test_name = source_file.name # type: Text +- test_name = test_name.replace(u'-manual', u'') ++ test_name = test_name.replace('-manual', '') + test_files[test_name].add(path) + + errors = [] +@@ -331,7 +331,7 @@ def check_css_globally_unique(repo_root, paths): + # Only compute by_spec if there are prima-facie collisions because of cost + by_spec = defaultdict(set) # type: Dict[Text, Set[Text]] + for path in colliding: +- source_file = SourceFile(repo_root, path, 
u"/") ++ source_file = SourceFile(repo_root, path, "/") + for link in source_file.spec_links: + for r in (drafts_csswg_re, w3c_tr_re, w3c_dev_re): + m = r.match(link) +@@ -388,7 +388,7 @@ def check_unique_testharness_basenames(repo_root, paths): + continue + file_name, file_extension = os.path.splitext(path) + file_dict[file_name].append(file_extension) +- for k, v in file_dict.items(): ++ for k, v in list(file_dict.items()): + if len(v) == 1: + continue + context = (', '.join(v),) +@@ -586,9 +586,9 @@ def check_parsed(repo_root, path, f): + if variant != "" and variant[0] not in ("?", "#"): + errors.append(rules.MalformedVariant.error(path, (path,))) + +- required_elements.extend(key for key, value in {"testharness": True, ++ required_elements.extend(key for key, value in list({"testharness": True, + "testharnessreport": len(testharnessreport_nodes) > 0, +- "timeout": len(source_file.timeout_nodes) > 0}.items() ++ "timeout": len(source_file.timeout_nodes) > 0}.items()) + if value) + + testdriver_vendor_nodes = [] # type: List[ElementTree.Element] +@@ -895,7 +895,7 @@ def output_error_count(error_count): + return + + assert logger is not None +- by_type = " ".join("%s: %d" % item for item in error_count.items()) ++ by_type = " ".join("%s: %d" % item for item in list(error_count.items())) + count = sum(error_count.values()) + logger.info("") + if count == 1: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/rules.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/rules.py +index f6e23aef5..ab8a725e1 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/rules.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/rules.py +@@ -1,4 +1,4 @@ +-from __future__ import unicode_literals ++ + + import abc + import inspect +diff --git 
a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/download.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/download.py +index 9d763181d..5dd0a22bb 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/download.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/download.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import ++ + + import argparse + import bz2 +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/item.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/item.py +index efb49d7f4..cba208d1c 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/item.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/item.py +@@ -129,7 +129,7 @@ class URLManifestItem(ManifestItem): + @property + def url(self): + # type: () -> Text +- rel_url = self._url or self.path.replace(os.path.sep, u"/") ++ rel_url = self._url or self.path.replace(os.path.sep, "/") + # we can outperform urljoin, because we know we just have path relative URLs + if self.url_base == "/": + return "/" + rel_url +@@ -149,7 +149,7 @@ class URLManifestItem(ManifestItem): + + def to_json(self): + # type: () -> Tuple[Optional[Text], Dict[Any, Any]] +- rel_url = None if self._url == self.path.replace(os.path.sep, u"/") else self._url ++ rel_url = None if self._url == self.path.replace(os.path.sep, "/") else self._url + rv = (rel_url, {}) # type: Tuple[Optional[Text], Dict[Any, Any]] + return rv + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/manifest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/manifest.py +index 449cd245a..43e425571 
100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/manifest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/manifest.py +@@ -66,15 +66,15 @@ class InvalidCacheError(Exception): + pass + + +-item_classes = {u"testharness": TestharnessTest, +- u"reftest": RefTest, +- u"print-reftest": PrintRefTest, +- u"crashtest": CrashTest, +- u"manual": ManualTest, +- u"wdspec": WebDriverSpecTest, +- u"conformancechecker": ConformanceCheckerTest, +- u"visual": VisualTest, +- u"support": SupportFile} # type: Dict[Text, Type[ManifestItem]] ++item_classes = {"testharness": TestharnessTest, ++ "reftest": RefTest, ++ "print-reftest": PrintRefTest, ++ "crashtest": CrashTest, ++ "manual": ManualTest, ++ "wdspec": WebDriverSpecTest, ++ "conformancechecker": ConformanceCheckerTest, ++ "visual": VisualTest, ++ "support": SupportFile} # type: Dict[Text, Type[ManifestItem]] + + + def compute_manifest_items(source_file): +@@ -151,7 +151,7 @@ class Manifest(object): + # type: (Text) -> Iterable[ManifestItem] + tpath = tuple(path.split(os.path.sep)) + +- for type_tests in self._data.values(): ++ for type_tests in list(self._data.values()): + i = type_tests.get(tpath, set()) + assert i is not None + for test in i: +@@ -162,7 +162,7 @@ class Manifest(object): + tpath = tuple(dir_name.split(os.path.sep)) + tpath_len = len(tpath) + +- for type_tests in self._data.values(): ++ for type_tests in list(self._data.values()): + for path, tests in iteritems(type_tests): + if path[:tpath_len] == tpath: + for test in tests: +@@ -242,9 +242,9 @@ class Manifest(object): + chunksize=max(1, len(to_update) // 10000) + ) # type: Iterator[Tuple[Tuple[Text, ...], Text, Set[ManifestItem], Text]] + elif PY3: +- results = map(compute_manifest_items, to_update) ++ results = list(map(compute_manifest_items, to_update)) + else: +- results = itertools.imap(compute_manifest_items, to_update) ++ results = 
map(compute_manifest_items, to_update) + + for result in results: + rel_path_parts, new_type, manifest_items, file_hash = result +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/sourcefile.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/sourcefile.py +index 6e90ac054..8b64e633f 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/sourcefile.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/sourcefile.py +@@ -51,7 +51,7 @@ python_meta_re = re.compile(br"#\s*META:\s*(\w*)=(.*)$") + + reference_file_re = re.compile(r'(^|[\-_])(not)?ref[0-9]*([\-_]|$)') + +-space_chars = u"".join(html5lib.constants.spaceCharacters) # type: Text ++space_chars = "".join(html5lib.constants.spaceCharacters) # type: Text + + + def replace_end(s, old, new): +@@ -185,19 +185,19 @@ def _parse_xml(f): + + + class SourceFile(object): +- parsers = {u"html":_parse_html, +- u"xhtml":_parse_xml, +- u"svg":_parse_xml} # type: Dict[Text, Callable[[BinaryIO], ElementTree.ElementTree]] ++ parsers = {"html":_parse_html, ++ "xhtml":_parse_xml, ++ "svg":_parse_xml} # type: Dict[Text, Callable[[BinaryIO], ElementTree.ElementTree]] + +- root_dir_non_test = {u"common"} ++ root_dir_non_test = {"common"} + +- dir_non_test = {u"resources", +- u"support", +- u"tools"} ++ dir_non_test = {"resources", ++ "support", ++ "tools"} + +- dir_path_non_test = {(u"css21", u"archive"), +- (u"css", u"CSS2", u"archive"), +- (u"css", u"common")} # type: Set[Tuple[Text, ...]] ++ dir_path_non_test = {("css21", "archive"), ++ ("css", "CSS2", "archive"), ++ ("css", "common")} # type: Set[Tuple[Text, ...]] + + def __init__(self, tests_root, rel_path, url_base, hash=None, contents=None): + # type: (Text, Text, Text, Optional[Text], Optional[bytes]) -> None +@@ -212,7 +212,7 @@ class SourceFile(object): + assert not 
os.path.isabs(rel_path), rel_path + if os.name == "nt": + # do slash normalization on Windows +- rel_path = rel_path.replace(u"/", u"\\") ++ rel_path = rel_path.replace("/", "\\") + + dir_path, filename = os.path.split(rel_path) + name, ext = os.path.splitext(filename) +@@ -331,11 +331,11 @@ class SourceFile(object): + """Check if the file name matches the conditions for the file to + be a non-test file""" + return (self.is_dir() or +- self.name_prefix(u"MANIFEST") or +- self.filename == u"META.yml" or +- self.filename.startswith(u".") or +- self.filename.endswith(u".headers") or +- self.filename.endswith(u".ini") or ++ self.name_prefix("MANIFEST") or ++ self.filename == "META.yml" or ++ self.filename.startswith(".") or ++ self.filename.endswith(".headers") or ++ self.filename.endswith(".ini") or + self.in_non_test_dir()) + + @property +@@ -435,14 +435,14 @@ class SourceFile(object): + + if not ext: + return None +- if ext[0] == u".": ++ if ext[0] == ".": + ext = ext[1:] +- if ext in [u"html", u"htm"]: +- return u"html" +- if ext in [u"xhtml", u"xht", u"xml"]: +- return u"xhtml" +- if ext == u"svg": +- return u"svg" ++ if ext in ["html", "htm"]: ++ return "html" ++ if ext in ["xhtml", "xht", "xml"]: ++ return "xhtml" ++ if ext == "svg": ++ return "svg" + return None + + @cached_property +@@ -550,9 +550,9 @@ class SourceFile(object): + + def parse_ref_keyed_meta(self, node): + # type: (ElementTree.Element) -> Tuple[Optional[Tuple[Text, Text, Text]], Text] +- item = node.attrib.get(u"content", u"") # type: Text ++ item = node.attrib.get("content", "") # type: Text + +- parts = item.rsplit(u":", 1) ++ parts = item.rsplit(":", 1) + if len(parts) == 1: + key = None # type: Optional[Tuple[Text, Text, Text]] + value = parts[0] +@@ -563,7 +563,7 @@ class SourceFile(object): + if ref[0] == key_part: + reftype = ref[1] + break +- if reftype not in (u"==", u"!="): ++ if reftype not in ("==", "!="): + raise ValueError("Key %s doesn't correspond to a reference" % key_part) + 
key = (self.url, key_part, reftype) + value = parts[1] +@@ -590,26 +590,26 @@ class SourceFile(object): + if not self.fuzzy_nodes: + return rv + +- args = [u"maxDifference", u"totalPixels"] ++ args = ["maxDifference", "totalPixels"] + + for node in self.fuzzy_nodes: + key, value = self.parse_ref_keyed_meta(node) +- ranges = value.split(u";") ++ ranges = value.split(";") + if len(ranges) != 2: + raise ValueError("Malformed fuzzy value %s" % value) + arg_values = {} # type: Dict[Text, List[int]] + positional_args = deque() # type: Deque[List[int]] + for range_str_value in ranges: # type: Text + name = None # type: Optional[Text] +- if u"=" in range_str_value: ++ if "=" in range_str_value: + name, range_str_value = [part.strip() +- for part in range_str_value.split(u"=", 1)] ++ for part in range_str_value.split("=", 1)] + if name not in args: + raise ValueError("%s is not a valid fuzzy property" % name) + if arg_values.get(name): + raise ValueError("Got multiple values for argument %s" % name) +- if u"-" in range_str_value: +- range_min, range_max = range_str_value.split(u"-") ++ if "-" in range_str_value: ++ range_min, range_max = range_str_value.split("-") + else: + range_min = range_str_value + range_max = range_str_value +@@ -1017,7 +1017,7 @@ class SourceFile(object): + )] + + elif self.name_is_multi_global: +- globals = u"" ++ globals = "" + script_metadata = self.script_metadata + assert script_metadata is not None + for (key, value) in script_metadata: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/typedata.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/typedata.py +index 01bb82705..dd17aab27 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/typedata.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/typedata.py +@@ -193,11 +193,11 @@ class 
TypeData(TypeDataType):
+ 
+         return count
+ 
+-    def __nonzero__(self):
++    def __bool__(self):
+         # type: () -> bool
+         return bool(self._data) or bool(self._json_data)
+ 
+-    __bool__ = __nonzero__
++    __nonzero__ = __bool__
+ 
+     def __contains__(self, key):
+         # type: (Any) -> bool
+@@ -260,7 +260,7 @@ class TypeData(TypeDataType):
+             """ key function to sort lists with None values.
+ 
+             Python3 is more strict typewise. Comparing None and str for example is valid
+-            in python2 but throws an exception in python3.
++            in python2 but throws an exception in python3.
+             """
+             if element and not element[0]:
+                 return ("", element[1])
+diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/utils.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/utils.py
+index 36c1a9831..ed9d26441 100644
+--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/utils.py
++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/utils.py
+@@ -22,52 +22,52 @@ else:
+ def rel_path_to_url(rel_path, url_base="/"):
+     # type: (Text, Text) -> Text
+     assert not os.path.isabs(rel_path), rel_path
+-    if url_base[0] != u"/":
+-        url_base = u"/" + url_base
+-    if url_base[-1] != u"/":
+-        url_base += u"/"
+-    return url_base + rel_path.replace(os.sep, u"/")
++    if url_base[0] != "/":
++        url_base = "/" + url_base
++    if url_base[-1] != "/":
++        url_base += "/"
++    return url_base + rel_path.replace(os.sep, "/")
+ 
+ 
+ def from_os_path(path):
+     # type: (Text) -> Text
+-    assert os.path.sep == u"/" or platform.system() == "Windows"
+-    if u"/" == os.path.sep:
++    assert os.path.sep == "/" or platform.system() == "Windows"
++    if "/" == os.path.sep:
+         rv = path
+     else:
+-        rv = path.replace(os.path.sep, u"/")
+-        if u"\\" in rv:
++        rv = path.replace(os.path.sep, "/")
++        if "\\" in rv:
+             raise ValueError("path contains \\ when separator is %s" % os.path.sep)
+     return rv
+ 
+ + def to_os_path(path): + # type: (Text) -> Text +- assert os.path.sep == u"/" or platform.system() == "Windows" +- if u"\\" in path: ++ assert os.path.sep == "/" or platform.system() == "Windows" ++ if "\\" in path: + raise ValueError("normalised path contains \\") +- if u"/" == os.path.sep: ++ if "/" == os.path.sep: + return path +- return path.replace(u"/", os.path.sep) ++ return path.replace("/", os.path.sep) + + + def git(path): + # type: (Text) -> Optional[Callable[..., Text]] + def gitfunc(cmd, *args): + # type: (Text, *Text) -> Text +- full_cmd = [u"git", cmd] + list(args) ++ full_cmd = ["git", cmd] + list(args) + try: + return subprocess.check_output(full_cmd, cwd=path, stderr=subprocess.STDOUT).decode('utf8') + except Exception as e: + if platform.uname()[0] == "Windows" and isinstance(e, WindowsError): +- full_cmd[0] = u"git.bat" ++ full_cmd[0] = "git.bat" + return subprocess.check_output(full_cmd, cwd=path, stderr=subprocess.STDOUT).decode('utf8') + else: + raise + + try: + # this needs to be a command that fails if we aren't in a git repo +- gitfunc(u"rev-parse", u"--show-toplevel") ++ gitfunc("rev-parse", "--show-toplevel") + except (subprocess.CalledProcessError, OSError): + return None + else: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/vcs.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/vcs.py +index 3dfd7c980..282e70a75 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/vcs.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/manifest/vcs.py +@@ -32,7 +32,7 @@ def get_tree(tests_root, manifest, manifest_path, cache_root, + # type: (Text, Manifest, Optional[Text], Optional[Text], bool, bool) -> FileSystem + tree = None + if cache_root is None: +- cache_root = os.path.join(tests_root, u".wptcache") ++ cache_root = os.path.join(tests_root, ".wptcache") + if 
not os.path.exists(cache_root): + try: + os.makedirs(cache_root) +@@ -175,7 +175,7 @@ class CacheFile(with_metaclass(abc.ABCMeta)): + + + class MtimeCache(CacheFile): +- file_name = u"mtime.json" ++ file_name = "mtime.json" + + def __init__(self, cache_root, tests_root, manifest_path, rebuild=False): + # type: (Text, Text, Text, bool) -> None +@@ -196,12 +196,12 @@ class MtimeCache(CacheFile): + + def check_valid(self, data): + # type: (Dict[Any, Any]) -> Dict[Any, Any] +- if data.get(u"/tests_root") != self.tests_root: ++ if data.get("/tests_root") != self.tests_root: + self.modified = True + else: + if self.manifest_path is not None and os.path.exists(self.manifest_path): + mtime = os.path.getmtime(self.manifest_path) +- if data.get(u"/manifest_path") != [self.manifest_path, mtime]: ++ if data.get("/manifest_path") != [self.manifest_path, mtime]: + self.modified = True + else: + self.modified = True +@@ -229,10 +229,10 @@ class GitIgnoreCache(CacheFile, MutableMapping): # type: ignore + # type: (Dict[Any, Any]) -> Dict[Any, Any] + ignore_path = os.path.join(self.tests_root, ".gitignore") + mtime = os.path.getmtime(ignore_path) +- if data.get(u"/gitignore_file") != [ignore_path, mtime]: ++ if data.get("/gitignore_file") != [ignore_path, mtime]: + self.modified = True + data = {} +- data[u"/gitignore_file"] = [ignore_path, mtime] ++ data["/gitignore_file"] = [ignore_path, mtime] + return data + + def __contains__(self, key): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/serve/serve.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/serve/serve.py +index 8243acc5e..27ed9891c 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/serve/serve.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/serve/serve.py +@@ -1,6 +1,6 @@ + # -*- coding: utf-8 -*- + +-from __future__ import print_function ++ + + import abc + 
import argparse +@@ -179,7 +179,7 @@ class HtmlWrapperHandler(WrapperHandler): + + def check_exposure(self, request): + if self.global_type: +- globals = u"" ++ globals = "" + for (key, value) in self._get_metadata(request): + if key == "global": + globals = value +@@ -342,7 +342,7 @@ class RoutesBuilder(object): + # Using reversed here means that mount points that are added later + # get higher priority. This makes sense since / is typically added + # first. +- for item in reversed(self.mountpoint_routes.values()): ++ for item in reversed(list(self.mountpoint_routes.values())): + routes.extend(item) + return routes + +@@ -526,7 +526,7 @@ def make_hosts_file(config, host): + + def start_servers(host, ports, paths, routes, bind_address, config, **kwargs): + servers = defaultdict(list) +- for scheme, ports in ports.items(): ++ for scheme, ports in list(ports.items()): + assert len(ports) == {"http": 2, "https": 2}.get(scheme, 1) + + # If trying to start HTTP/2.0 server, check compatibility +@@ -788,25 +788,25 @@ def start(config, routes, **kwargs): + + + def iter_procs(servers): +- for servers in servers.values(): ++ for servers in list(servers.values()): + for port, server in servers: + yield server.proc + + + def _make_subdomains_product(s, depth=2): +- return {u".".join(x) for x in chain(*(product(s, repeat=i) for i in range(1, depth+1)))} ++ return {".".join(x) for x in chain(*(product(s, repeat=i) for i in range(1, depth+1)))} + + def _make_origin_policy_subdomains(limit): +- return {u"op%d" % x for x in range(1,limit+1)} ++ return {"op%d" % x for x in range(1,limit+1)} + + +-_subdomains = {u"www", +- u"www1", +- u"www2", +- u"天気の良い日", +- u"élève"} ++_subdomains = {"www", ++ "www1", ++ "www2", ++ "天気の良い日", ++ "élève"} + +-_not_subdomains = {u"nonexistent"} ++_not_subdomains = {"nonexistent"} + + _subdomains = _make_subdomains_product(_subdomains) + +diff --git 
a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/atomicwrites/atomicwrites/__init__.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/atomicwrites/atomicwrites/__init__.py +index a182c07af..857149069 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/atomicwrites/atomicwrites/__init__.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/atomicwrites/atomicwrites/__init__.py +@@ -13,7 +13,7 @@ __version__ = '1.1.5' + + PY2 = sys.version_info[0] == 2 + +-text_type = unicode if PY2 else str # noqa ++text_type = str if PY2 else str # noqa + + + def _path_to_unicode(x): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/enum/enum/__init__.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/enum/enum/__init__.py +index d6ffb3a40..1107b38ed 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/enum/enum/__init__.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/enum/enum/__init__.py +@@ -23,17 +23,17 @@ except ImportError: + OrderedDict = None + + try: +- basestring ++ str + except NameError: + # In Python 2 basestring is the ancestor of both str and unicode + # in Python 3 it's just str, but was missing in 3.1 +- basestring = str ++ str = str + + try: +- unicode ++ str + except NameError: + # In Python 3 unicode no longer exists (it's just str) +- unicode = str ++ str = str + + class _RouteClassAttributeToGetattr(object): + """Route attribute access on a class to __getattr__. 
+@@ -158,7 +158,7 @@ class EnumMeta(type): + if type(classdict) is dict: + original_dict = classdict + classdict = _EnumDict() +- for k, v in original_dict.items(): ++ for k, v in list(original_dict.items()): + classdict[k] = v + + member_type, first_enum = metacls._get_mixins_(bases) +@@ -175,7 +175,7 @@ class EnumMeta(type): + if _order_ is None: + if pyver < 3.0: + try: +- _order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])] ++ _order_ = [name for (name, value) in sorted(list(members.items()), key=lambda item: item[1])] + except TypeError: + _order_ = [name for name in sorted(members.keys())] + else: +@@ -236,7 +236,7 @@ class EnumMeta(type): + enum_member.__init__(*args) + # If another member with the same value was already defined, the + # new member becomes an alias to the existing one. +- for name, canonical_member in enum_class._member_map_.items(): ++ for name, canonical_member in list(enum_class._member_map_.items()): + if canonical_member.value == enum_member._value_: + enum_member = canonical_member + break +@@ -433,7 +433,7 @@ class EnumMeta(type): + """ + if pyver < 3.0: + # if class_name is unicode, attempt a conversion to ASCII +- if isinstance(class_name, unicode): ++ if isinstance(class_name, str): + try: + class_name = class_name.encode('ascii') + except UnicodeEncodeError: +@@ -447,22 +447,22 @@ class EnumMeta(type): + _order_ = [] + + # special processing needed for names? +- if isinstance(names, basestring): ++ if isinstance(names, str): + names = names.replace(',', ' ').split() +- if isinstance(names, (tuple, list)) and isinstance(names[0], basestring): ++ if isinstance(names, (tuple, list)) and isinstance(names[0], str): + names = [(e, i+start) for (i, e) in enumerate(names)] + + # Here, names is either an iterable of (name, value) or a mapping. 
+         item = None  # in case names is empty
+         for item in names:
+-            if isinstance(item, basestring):
++            if isinstance(item, str):
+                 member_name, member_value = item, names[item]
+             else:
+                 member_name, member_value = item
+             classdict[member_name] = member_value
+             _order_.append(member_name)
+         # only set _order_ in classdict if name/value was not from a mapping
+-        if not isinstance(item, basestring):
++        if not isinstance(item, str):
+             classdict['_order_'] = ' '.join(_order_)
+         enum_class = metacls.__new__(metacls, class_name, bases, classdict)
+ 
+@@ -656,7 +656,7 @@ def __new__(cls, value):
+             return cls._value2member_map_[value]
+     except TypeError:
+         # not there, now do long search -- O(n) behavior
+-        for member in cls._member_map_.values():
++        for member in list(cls._member_map_.values()):
+             if member.value == value:
+                 return member
+     raise ValueError("%s is not a valid %s" % (value, cls.__name__))
+@@ -800,7 +800,7 @@ def _convert(cls, name, module, filter, source=None):
+         source = vars(source)
+     else:
+         source = module_globals
+-    members = dict((name, value) for name, value in source.items() if filter(name))
++    members = dict((name, value) for name, value in list(source.items()) if filter(name))
+     cls = cls(name, members, module=module)
+     cls.__reduce_ex__ = _reduce_ex_by_name
+     module_globals.update(cls.__members__)
+@@ -824,7 +824,7 @@ def _reduce_ex_by_name(self, proto):
+ 
+ def unique(enumeration):
+     """Class decorator that ensures only unique members exist in an enumeration."""
+     duplicates = []
+-    for name, member in enumeration.__members__.items():
++    for name, member in list(enumeration.__members__.items()):
+         if name != member.name:
+             duplicates.append((name, member.name))
+     if duplicates:
+diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/connection.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/connection.py
+index 4405183e8..c236f565d 
100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/connection.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/connection.py +@@ -393,7 +393,7 @@ class H2Connection(object): + count = 0 + to_delete = [] + +- for stream_id, stream in self.streams.items(): ++ for stream_id, stream in list(self.streams.items()): + if stream.open and (stream_id % 2 == remainder): + count += 1 + elif stream.closed: +@@ -469,7 +469,7 @@ class H2Connection(object): + s.max_outbound_frame_size = self.max_outbound_frame_size + + self.streams[stream_id] = s +- self.config.logger.debug("Current streams: %s", self.streams.keys()) ++ self.config.logger.debug("Current streams: %s", list(self.streams.keys())) + + if outbound: + self.highest_outbound_stream_id = stream_id +@@ -491,7 +491,7 @@ class H2Connection(object): + preamble = b'' + + f = SettingsFrame(0) +- for setting, value in self.local_settings.items(): ++ for setting, value in list(self.local_settings.items()): + f.settings[setting] = value + self.config.logger.debug( + "Send Settings frame: %s", self.local_settings +@@ -542,7 +542,7 @@ class H2Connection(object): + + if self.config.client_side: + f = SettingsFrame(0) +- for setting, value in self.local_settings.items(): ++ for setting, value in list(self.local_settings.items()): + f.settings[setting] = value + + frame_data = f.serialize_body() +@@ -1393,7 +1393,7 @@ class H2Connection(object): + if SettingCodes.MAX_FRAME_SIZE in changes: + setting = changes[SettingCodes.MAX_FRAME_SIZE] + self.max_outbound_frame_size = setting.new_value +- for stream in self.streams.values(): ++ for stream in list(self.streams.values()): + stream.max_outbound_frame_size = setting.new_value + + f = SettingsFrame(0) +@@ -1412,7 +1412,7 @@ class H2Connection(object): + """ + delta = new_value - old_value + +- for stream in self.streams.values(): ++ for stream in 
list(self.streams.values()): + stream.outbound_flow_control_window = guard_increment_window( + stream.outbound_flow_control_window, + delta +@@ -1428,7 +1428,7 @@ class H2Connection(object): + """ + delta = new_value - old_value + +- for stream in self.streams.values(): ++ for stream in list(self.streams.values()): + stream._inbound_flow_control_change_from_settings(delta) + + def receive_data(self, data): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/events.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/events.py +index ff3ec3df3..7038ce6c0 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/events.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/events.py +@@ -342,7 +342,7 @@ class RemoteSettingsChanged(Event): + the form of a dictionary of ``{setting: value}``. 
+ """ + e = cls() +- for setting, new_value in new_settings.items(): ++ for setting, new_value in list(new_settings.items()): + setting = _setting_code_from_int(setting) + original_value = old_settings.get(setting) + change = ChangedSetting(setting, original_value, new_value) +@@ -352,7 +352,7 @@ class RemoteSettingsChanged(Event): + + def __repr__(self): + return "" % ( +- ", ".join(repr(cs) for cs in self.changed_settings.values()), ++ ", ".join(repr(cs) for cs in list(self.changed_settings.values())), + ) + + +@@ -455,7 +455,7 @@ class SettingsAcknowledged(Event): + + def __repr__(self): + return "" % ( +- ", ".join(repr(cs) for cs in self.changed_settings.values()), ++ ", ".join(repr(cs) for cs in list(self.changed_settings.values())), + ) + + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/frame_buffer.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/frame_buffer.py +index e79f6ec2d..06031f7da 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/frame_buffer.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/frame_buffer.py +@@ -130,7 +130,7 @@ class FrameBuffer(object): + def __iter__(self): + return self + +- def next(self): # Python 2 ++ def __next__(self): # Python 2 + # First, check that we have enough data to successfully parse the + # next frame header. If not, bail. Otherwise, parse it. + if len(self.data) < 9: +@@ -169,7 +169,7 @@ class FrameBuffer(object): + # frame in the sequence instead. Recurse back into ourselves to do + # that. This is safe because the amount of work we have to do here is + # strictly bounded by the length of the buffer. 
+- return f if f is not None else self.next() ++ return f if f is not None else next(self) + + def __next__(self): # Python 3 +- return self.next() ++ return next(self) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/settings.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/settings.py +index 3da720329..28ea04568 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/settings.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/settings.py +@@ -137,7 +137,7 @@ class Settings(collections.MutableMapping): + SettingCodes.MAX_FRAME_SIZE: collections.deque([16384]), + } + if initial_values is not None: +- for key, value in initial_values.items(): ++ for key, value in list(initial_values.items()): + invalid = _validate_setting(key, value) + if invalid: + raise InvalidSettingsValueError( +@@ -157,7 +157,7 @@ class Settings(collections.MutableMapping): + + # If there is more than one setting in the list, we have a setting + # value outstanding. Update them. 
+- for k, v in self._settings.items(): ++ for k, v in list(self._settings.items()): + if len(v) > 1: + old_setting = v.popleft() + new_setting = v[0] +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/utilities.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/utilities.py +index 0cff0fa67..4a2b7fb9c 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/utilities.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/utilities.py +@@ -19,45 +19,45 @@ UPPER_RE = re.compile(b"[A-Z]") + # A set of headers that are hop-by-hop or connection-specific and thus + # forbidden in HTTP/2. This list comes from RFC 7540 § 8.1.2.2. + CONNECTION_HEADERS = frozenset([ +- b'connection', u'connection', +- b'proxy-connection', u'proxy-connection', +- b'keep-alive', u'keep-alive', +- b'transfer-encoding', u'transfer-encoding', +- b'upgrade', u'upgrade', ++ b'connection', 'connection', ++ b'proxy-connection', 'proxy-connection', ++ b'keep-alive', 'keep-alive', ++ b'transfer-encoding', 'transfer-encoding', ++ b'upgrade', 'upgrade', + ]) + + + _ALLOWED_PSEUDO_HEADER_FIELDS = frozenset([ +- b':method', u':method', +- b':scheme', u':scheme', +- b':authority', u':authority', +- b':path', u':path', +- b':status', u':status', ++ b':method', ':method', ++ b':scheme', ':scheme', ++ b':authority', ':authority', ++ b':path', ':path', ++ b':status', ':status', + ]) + + + _SECURE_HEADERS = frozenset([ + # May have basic credentials which are vulnerable to dictionary attacks. 
+- b'authorization', u'authorization', +- b'proxy-authorization', u'proxy-authorization', ++ b'authorization', 'authorization', ++ b'proxy-authorization', 'proxy-authorization', + ]) + + + _REQUEST_ONLY_HEADERS = frozenset([ +- b':scheme', u':scheme', +- b':path', u':path', +- b':authority', u':authority', +- b':method', u':method' ++ b':scheme', ':scheme', ++ b':path', ':path', ++ b':authority', ':authority', ++ b':method', ':method' + ]) + + +-_RESPONSE_ONLY_HEADERS = frozenset([b':status', u':status']) ++_RESPONSE_ONLY_HEADERS = frozenset([b':status', ':status']) + + + if sys.version_info[0] == 2: # Python 2.X + _WHITESPACE = frozenset(whitespace) + else: # Python 3.3+ +- _WHITESPACE = frozenset(map(ord, whitespace)) ++ _WHITESPACE = frozenset(list(map(ord, whitespace))) + + + def _secure_headers(headers, hdr_validation_flags): +@@ -81,7 +81,7 @@ def _secure_headers(headers, hdr_validation_flags): + for header in headers: + if header[0] in _SECURE_HEADERS: + yield NeverIndexedHeaderTuple(*header) +- elif header[0] in (b'cookie', u'cookie') and len(header[1]) < 20: ++ elif header[0] in (b'cookie', 'cookie') and len(header[1]) < 20: + yield NeverIndexedHeaderTuple(*header) + else: + yield header +@@ -92,7 +92,7 @@ def extract_method_header(headers): + Extracts the request method from the headers list. + """ + for k, v in headers: +- if k in (b':method', u':method'): ++ if k in (b':method', ':method'): + if not isinstance(v, bytes): + return v.encode('utf-8') + else: +@@ -116,9 +116,9 @@ def is_informational_response(headers): + status = b':status' + informational_start = b'1' + else: +- sigil = u':' +- status = u':status' +- informational_start = u'1' ++ sigil = ':' ++ status = ':status' ++ informational_start = '1' + + # If we find a non-special header, we're done here: stop looping. 
+ if not n.startswith(sigil): +@@ -173,7 +173,7 @@ def authority_from_headers(headers): + # This gets run against headers that come both from HPACK and from the + # user, so we may have unicode floating around in here. We only want + # bytes. +- if n in (b':authority', u':authority'): ++ if n in (b':authority', ':authority'): + return v.encode('utf-8') if not isinstance(v, bytes) else v + + return None +@@ -266,8 +266,8 @@ def _reject_te(headers, hdr_validation_flags): + its value is anything other than "trailers". + """ + for header in headers: +- if header[0] in (b'te', u'te'): +- if header[1].lower() not in (b'trailers', u'trailers'): ++ if header[0] in (b'te', 'te'): ++ if header[1].lower() not in (b'trailers', 'trailers'): + raise ProtocolError( + "Invalid value for Transfer-Encoding header: %s" % + header[1] +@@ -325,7 +325,7 @@ def _reject_pseudo_header_fields(headers, hdr_validation_flags): + seen_regular_header = False + + for header in headers: +- if _custom_startswith(header[0], b':', u':'): ++ if _custom_startswith(header[0], b':', ':'): + if header[0] in seen_pseudo_header_fields: + raise ProtocolError( + "Received duplicate pseudo-header field %s" % header[0] +@@ -374,7 +374,7 @@ def _check_pseudo_header_field_acceptability(pseudo_headers, + # Relevant RFC section: RFC 7540 § 8.1.2.4 + # https://tools.ietf.org/html/rfc7540#section-8.1.2.4 + if hdr_validation_flags.is_response_header: +- _assert_header_in_set(u':status', b':status', pseudo_headers) ++ _assert_header_in_set(':status', b':status', pseudo_headers) + invalid_response_headers = pseudo_headers & _REQUEST_ONLY_HEADERS + if invalid_response_headers: + raise ProtocolError( +@@ -385,9 +385,9 @@ def _check_pseudo_header_field_acceptability(pseudo_headers, + not hdr_validation_flags.is_trailer): + # This is a request, so we need to have seen :path, :method, and + # :scheme. 
+- _assert_header_in_set(u':path', b':path', pseudo_headers) +- _assert_header_in_set(u':method', b':method', pseudo_headers) +- _assert_header_in_set(u':scheme', b':scheme', pseudo_headers) ++ _assert_header_in_set(':path', b':path', pseudo_headers) ++ _assert_header_in_set(':method', b':method', pseudo_headers) ++ _assert_header_in_set(':scheme', b':scheme', pseudo_headers) + invalid_request_headers = pseudo_headers & _RESPONSE_ONLY_HEADERS + if invalid_request_headers: + raise ProtocolError( +@@ -417,9 +417,9 @@ def _validate_host_authority_header(headers): + host_header_val = None + + for header in headers: +- if header[0] in (b':authority', u':authority'): ++ if header[0] in (b':authority', ':authority'): + authority_header_val = header[1] +- elif header[0] in (b'host', u'host'): ++ elif header[0] in (b'host', 'host'): + host_header_val = header[1] + + yield header +@@ -472,7 +472,7 @@ def _check_path_header(headers, hdr_validation_flags): + """ + def inner(): + for header in headers: +- if header[0] in (b':path', u':path'): ++ if header[0] in (b':path', ':path'): + if not header[1]: + raise ProtocolError("An empty :path header is forbidden") + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/windows.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/windows.py +index 6656975f4..9ae3e60f5 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/windows.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/windows.py +@@ -12,7 +12,7 @@ the user has already used. It then implements a basic algorithm that attempts + to manage the flow control window without user input, trying to ensure that it + does not emit too many WINDOW_UPDATE frames. 
+ """ +-from __future__ import division ++ + + from .exceptions import FlowControlError + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/hpack/hpack/compat.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/hpack/hpack/compat.py +index 4fcaad439..3b4feca57 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/hpack/hpack/compat.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/hpack/hpack/compat.py +@@ -25,7 +25,7 @@ if is_py2: + else: + return bytes(b) + +- unicode = unicode # noqa ++ str = str # noqa + bytes = str + + elif is_py3: +@@ -38,5 +38,5 @@ elif is_py3: + def to_bytes(b): + return bytes(b) + +- unicode = str ++ str = str + bytes = bytes +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/hpack/hpack/hpack.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/hpack/hpack/hpack.py +index f8e808bec..773b36567 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/hpack/hpack/hpack.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/hpack/hpack/hpack.py +@@ -31,9 +31,9 @@ INDEX_INCREMENTAL = b'\x40' + _PREFIX_BIT_MAX_NUMBERS = [(2 ** i) - 1 for i in range(9)] + + try: # pragma: no cover +- basestring = basestring ++ str = str + except NameError: # pragma: no cover +- basestring = (str, bytes) ++ str = (str, bytes) + + + # We default the maximum header list we're willing to accept to 64kB. 
That's a +@@ -137,7 +137,7 @@ def _dict_to_iterable(header_dict): + """ + assert isinstance(header_dict, dict) + keys = sorted( +- header_dict.keys(), ++ list(header_dict.keys()), + key=lambda k: not _to_bytes(k).startswith(b':') + ) + for key in keys: +@@ -148,7 +148,7 @@ def _to_bytes(string): + """ + Convert string to bytes. + """ +- if not isinstance(string, basestring): # pragma: no cover ++ if not isinstance(string, (str, bytes)): # pragma: no cover + string = str(string) + + return string if isinstance(string, bytes) else string.encode('utf-8') +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/__init__.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/__init__.py +index 7b854f990..c46b26b7b 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/__init__.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/__init__.py +@@ -20,7 +20,7 @@ For convenience, this module re-exports the following names: + * :func:`~.serializer.serialize` + """ + +-from __future__ import absolute_import, division, unicode_literals ++ + + from .html5parser import HTMLParser, parse, parseFragment + from .treebuilders import getTreeBuilder +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_ihatexml.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_ihatexml.py +index 3ff803c19..a35e057ff 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_ihatexml.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_ihatexml.py +@@ -1,4 +1,4 @@ +-from __future__
import absolute_import, division, unicode_literals ++ + + import re + import warnings +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_inputstream.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_inputstream.py +index 0207dd211..593f8702d 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_inputstream.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_inputstream.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from six import text_type + from six.moves import http_client, urllib +@@ -598,7 +598,7 @@ class EncodingBytes(bytes): + raise TypeError + return self[p:p + 1] + +- def next(self): ++ def next(self): + # Py2 compat + return self.__next__() + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_tokenizer.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_tokenizer.py +index 4748a1979..34054e105 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_tokenizer.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_tokenizer.py +@@ -1,6 +1,6 @@ +-from __future__ import absolute_import, division, unicode_literals + +-from six import unichr as chr ++ ++from six import unichr as chr + + from collections import deque, OrderedDict + from sys import version_info +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_trie/__init__.py 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_trie/__init__.py +index 07bad5d31..0c62e6903 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_trie/__init__.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_trie/__init__.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from .py import Trie + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_trie/_base.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_trie/_base.py +index 6b71975f0..772ca80e5 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_trie/_base.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_trie/_base.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + try: + from collections.abc import Mapping +@@ -11,7 +11,7 @@ class Trie(Mapping): + + def keys(self, prefix=None): + # pylint:disable=arguments-differ +- keys = super(Trie, self).keys() ++ keys = list(super(Trie, self).keys()) + + if prefix is None: + return set(keys) +@@ -19,7 +19,7 @@ class Trie(Mapping): + return {x for x in keys if x.startswith(prefix)} + + def has_keys_with_prefix(self, prefix): +- for key in self.keys(): ++ for key in list(self.keys()): + if key.startswith(prefix): + return True + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_trie/py.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_trie/py.py +index 
c2ba3da75..c6c69cdbb 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_trie/py.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_trie/py.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + from six import text_type + + from bisect import bisect_left +@@ -8,7 +8,7 @@ from ._base import Trie as ABCTrie + + class Trie(ABCTrie): + def __init__(self, data): +- if not all(isinstance(x, text_type) for x in data.keys()): ++ if not all(isinstance(x, text_type) for x in list(data.keys())): + raise TypeError("All keys must be strings") + + self._data = data +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_utils.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_utils.py +index 9ea579421..efa2f7537 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_utils.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/_utils.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from types import ModuleType + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/constants.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/constants.py +index fe3e237cd..c14c3be2a 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/constants.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/constants.py +@@ -1,4 +1,4 @@ +-from 
__future__ import absolute_import, division, unicode_literals ++ + + import string + +@@ -520,7 +520,7 @@ adjustForeignAttributes = { + } + + unadjustForeignAttributes = {(ns, local): qname for qname, (prefix, local, ns) in +- adjustForeignAttributes.items()} ++ list(adjustForeignAttributes.items())} + + spaceCharacters = frozenset([ + "\t", +@@ -2933,7 +2933,7 @@ tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"], + tokenTypes["EmptyTag"]]) + + +-prefixes = {v: k for k, v in namespaces.items()} ++prefixes = {v: k for k, v in list(namespaces.items())} + prefixes["http://www.w3.org/1998/Math/MathML"] = "math" + + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/alphabeticalattributes.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/alphabeticalattributes.py +index 5ba926e3b..33c8e6bb4 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/alphabeticalattributes.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/alphabeticalattributes.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from . 
import base + +@@ -22,7 +22,7 @@ class Filter(base.Filter): + for token in base.Filter.__iter__(self): + if token["type"] in ("StartTag", "EmptyTag"): + attrs = OrderedDict() +- for name, value in sorted(token["data"].items(), ++ for name, value in sorted(list(token["data"].items()), + key=_attr_key): + attrs[name] = value + token["data"] = attrs +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/base.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/base.py +index c7dbaed0f..75b654649 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/base.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/base.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + + class Filter(object): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/inject_meta_charset.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/inject_meta_charset.py +index aefb5c842..ad082fe13 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/inject_meta_charset.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/inject_meta_charset.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from . 
import base + +@@ -31,7 +31,7 @@ class Filter(base.Filter): + if token["name"].lower() == "meta": + # replace charset with actual encoding + has_http_equiv_content_type = False +- for (namespace, name), value in token["data"].items(): ++ for (namespace, name), value in list(token["data"].items()): + if namespace is not None: + continue + elif name.lower() == 'charset': +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/lint.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/lint.py +index acd4d7a2a..a655d6d13 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/lint.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/lint.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from six import text_type + +@@ -44,7 +44,7 @@ class Filter(base.Filter): + assert type == "StartTag" + if type == "StartTag" and self.require_matching_tags: + open_elements.append((namespace, name)) +- for (namespace, name), value in token["data"].items(): ++ for (namespace, name), value in list(token["data"].items()): + assert namespace is None or isinstance(namespace, text_type) + assert namespace != "" + assert isinstance(name, text_type) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/optionaltags.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/optionaltags.py +index 4a865012c..6bd9392a1 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/optionaltags.py ++++ 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/optionaltags.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from . import base + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/sanitizer.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/sanitizer.py +index 70ef90665..49a29f8f1 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/sanitizer.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/sanitizer.py +@@ -6,7 +6,7 @@ is recommended as a replacement. Please let us know in the aforementioned issue + if Bleach is unsuitable for your needs. + + """ +-from __future__ import absolute_import, division, unicode_literals ++ + + import re + import warnings +@@ -873,7 +873,7 @@ class Filter(base.Filter): + elif token["data"]: + assert token_type in ("StartTag", "EmptyTag") + attrs = [] +- for (ns, name), v in token["data"].items(): ++ for (ns, name), v in list(token["data"].items()): + attrs.append(' %s="%s"' % (name if ns is None else "%s:%s" % (prefixes[ns], name), escape(v))) + token["data"] = "<%s%s>" % (token["name"], ''.join(attrs)) + else: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/whitespace.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/whitespace.py +index 0d12584b4..07f902683 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/whitespace.py ++++ 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/filters/whitespace.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + import re + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/html5parser.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/html5parser.py +index 74d829d98..5ef82239f 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/html5parser.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/html5parser.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + from six import with_metaclass, viewkeys + + import types +@@ -74,7 +74,7 @@ def parseFragment(doc, container="div", treebuilder="etree", namespaceHTMLElemen + def method_decorator_metaclass(function): + class Decorated(type): + def __new__(meta, classname, bases, classDict): +- for attributeName, attribute in classDict.items(): ++ for attributeName, attribute in list(classDict.items()): + if isinstance(attribute, types.FunctionType): + attribute = function(attribute) + +@@ -119,7 +119,7 @@ class HTMLParser(object): + self.errors = [] + + self.phases = {name: cls(self, self.tree) for name, cls in +- getPhases(debug).items()} ++ list(getPhases(debug).items())} + + def _parse(self, stream, innerHTML=False, container="div", scripting=False, **kwargs): + +@@ -397,7 +397,7 @@ class HTMLParser(object): + def getPhases(debug): + def log(function): + """Logger that records which phase processes each token""" +- type_names = {value: key for key, value in tokenTypes.items()} ++ type_names = {value: key for key, value in list(tokenTypes.items())} + + def wrapped(self, *args, **kwargs): + if 
function.__name__.startswith("process") and len(args) > 0: +@@ -473,7 +473,7 @@ def getPhases(debug): + self.parser.parseError("non-html-root") + # XXX Need a check here to see if the first start tag token emitted is + # this token... If it's not, invoke self.parser.parseError(). +- for attr, value in token["data"].items(): ++ for attr, value in list(token["data"].items()): + if attr not in self.tree.openElements[0].attributes: + self.tree.openElements[0].attributes[attr] = value + self.parser.firstStartTag = False +@@ -1020,7 +1020,7 @@ def getPhases(debug): + assert self.parser.innerHTML + else: + self.parser.framesetOK = False +- for attr, value in token["data"].items(): ++ for attr, value in list(token["data"].items()): + if attr not in self.tree.openElements[1].attributes: + self.tree.openElements[1].attributes[attr] = value + +@@ -2779,7 +2779,7 @@ def adjust_attributes(token, replacements): + needs_adjustment = viewkeys(token['data']) & viewkeys(replacements) + if needs_adjustment: + token['data'] = type(token['data'])((replacements.get(k, k), v) +- for k, v in token['data'].items()) ++ for k, v in list(token['data'].items())) + + + def impliedTagToken(name, type="EndTag", attributes=None, +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/serializer.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/serializer.py +index c66df6839..f031bcaf7 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/serializer.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/serializer.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + from six import text_type + + import re +@@ -298,7 +298,7 @@ class HTMLSerializer(object): + in_cdata = True + elif in_cdata: + 
self.serializeError("Unexpected child element of a CDATA element") +- for (_, attr_name), attr_value in token["data"].items(): ++ for (_, attr_name), attr_value in list(token["data"].items()): + # TODO: Add namespace support here + k = attr_name + v = attr_value +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treeadapters/__init__.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treeadapters/__init__.py +index dfeb0ba5e..8ba44752a 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treeadapters/__init__.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treeadapters/__init__.py +@@ -16,7 +16,7 @@ Example: + genshi_tree = genshi.to_genshi(TreeWalker(tree)) + + """ +-from __future__ import absolute_import, division, unicode_literals ++ + + from . 
import sax + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treeadapters/genshi.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treeadapters/genshi.py +index 61d5fb6ac..6877c9956 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treeadapters/genshi.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treeadapters/genshi.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from genshi.core import QName, Attrs + from genshi.core import START, END, TEXT, COMMENT, DOCTYPE +@@ -27,7 +27,7 @@ def to_genshi(walker): + else: + name = token["name"] + attrs = Attrs([(QName("{%s}%s" % attr if attr[0] is not None else attr[1]), value) +- for attr, value in token["data"].items()]) ++ for attr, value in list(token["data"].items())]) + yield (START, (QName(name), attrs), (None, -1, -1)) + if type == "EmptyTag": + type = "EndTag" +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treeadapters/sax.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treeadapters/sax.py +index f4ccea5a2..6f321e7ef 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treeadapters/sax.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treeadapters/sax.py +@@ -1,11 +1,11 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from xml.sax.xmlreader import AttributesNSImpl + + from ..constants import adjustForeignAttributes, unadjustForeignAttributes + + prefix_mapping = {} +-for prefix, 
localName, namespace in adjustForeignAttributes.values(): ++for prefix, localName, namespace in list(adjustForeignAttributes.values()): + if prefix is not None: + prefix_mapping[prefix] = namespace + +@@ -19,7 +19,7 @@ def to_sax(walker, handler): + + """ + handler.startDocument() +- for prefix, namespace in prefix_mapping.items(): ++ for prefix, namespace in list(prefix_mapping.items()): + handler.startPrefixMapping(prefix, namespace) + + for token in walker: +@@ -45,6 +45,6 @@ def to_sax(walker, handler): + else: + assert False, "Unknown token type" + +- for prefix, namespace in prefix_mapping.items(): ++ for prefix, namespace in list(prefix_mapping.items()): + handler.endPrefixMapping(prefix) + handler.endDocument() +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/__init__.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/__init__.py +index d44447eaf..8628ff8a9 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/__init__.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/__init__.py +@@ -29,7 +29,7 @@ implement several things: + + """ + +-from __future__ import absolute_import, division, unicode_literals ++ + + from .._utils import default_etree + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/base.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/base.py +index e4a3d710d..73b49800e 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/base.py ++++ 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/base.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + from six import text_type + + from ..constants import scopingElements, tableInsertModeElements, namespaces +@@ -45,7 +45,7 @@ class Node(object): + def __str__(self): + attributesStr = " ".join(["%s=\"%s\"" % (name, value) + for name, value in +- self.attributes.items()]) ++ list(self.attributes.items())]) + if attributesStr: + return "<%s %s>" % (self.name, attributesStr) + else: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/dom.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/dom.py +index d8b530046..740ce9d86 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/dom.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/dom.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + + try: +@@ -22,7 +22,7 @@ def getDomBuilder(DomImplementation): + self.element = element + + def __iter__(self): +- return iter(self.element.attributes.keys()) ++ return iter(list(self.element.attributes.keys())) + + def __setitem__(self, name, value): + if isinstance(name, tuple): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/etree.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/etree.py +index 086bed4ee..c9fb9e2aa 100644 +--- 
a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/etree.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/etree.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + # pylint:disable=protected-access + + from six import text_type +@@ -68,7 +68,7 @@ def getETreeBuilder(ElementTreeImplementation, fullTree=False): + if attributes: + # calling .items _always_ allocates, and the above truthy check is cheaper than the + # allocation on average +- for key, value in attributes.items(): ++ for key, value in list(attributes.items()): + if isinstance(key, tuple): + name = "{%s}%s" % (key[2], key[1]) + else: +@@ -236,7 +236,7 @@ def getETreeBuilder(ElementTreeImplementation, fullTree=False): + + if hasattr(element, "attrib"): + attributes = [] +- for name, value in element.attrib.items(): ++ for name, value in list(element.attrib.items()): + nsmatch = tag_regexp.match(name) + if nsmatch is not None: + ns, name = nsmatch.groups() +@@ -296,7 +296,7 @@ def getETreeBuilder(ElementTreeImplementation, fullTree=False): + else: + attr = " ".join(["%s=\"%s\"" % ( + filter.fromXmlName(name), value) +- for name, value in element.attrib.items()]) ++ for name, value in list(element.attrib.items())]) + rv.append("<%s %s>" % (element.tag, attr)) + if element.text: + rv.append(element.text) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/etree_lxml.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/etree_lxml.py +index e73de61a8..6b7db7388 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/etree_lxml.py ++++ 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treebuilders/etree_lxml.py +@@ -9,7 +9,7 @@ Docypes with no name + When any of these things occur, we emit a DataLossWarning + """ + +-from __future__ import absolute_import, division, unicode_literals ++ + # pylint:disable=protected-access + + import warnings +@@ -115,7 +115,7 @@ def testSerializer(element): + + if hasattr(element, "attrib"): + attributes = [] +- for name, value in element.attrib.items(): ++ for name, value in list(element.attrib.items()): + nsmatch = tag_regexp.match(name) + if nsmatch is not None: + ns, name = nsmatch.groups() +@@ -164,7 +164,7 @@ def tostring(element): + rv.append("<%s>" % (element.tag,)) + else: + attr = " ".join(["%s=\"%s\"" % (name, value) +- for name, value in element.attrib.items()]) ++ for name, value in list(element.attrib.items())]) + rv.append("<%s %s>" % (element.tag, attr)) + if element.text: + rv.append(element.text) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/__init__.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/__init__.py +index b2d3aac31..6e611a2aa 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/__init__.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/__init__.py +@@ -8,7 +8,7 @@ implements a 'serialize' method which takes a tree as sole argument and + returns an iterator which generates tokens. + """ + +-from __future__ import absolute_import, division, unicode_literals ++ + + from .. 
import constants + from .._utils import default_etree +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/base.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/base.py +index 80c474c4e..8e214f50a 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/base.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/base.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from xml.dom import Node + from ..constants import namespaces, voidElements, spaceCharacters +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/dom.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/dom.py +index b0c89b001..d95ae9fb8 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/dom.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/dom.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from xml.dom import Node + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/etree.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/etree.py +index 44653372d..47b500ece 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/etree.py ++++ 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/etree.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from collections import OrderedDict + import re +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/etree_lxml.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/etree_lxml.py +index a614ac5b3..bbc9071f3 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/etree_lxml.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/etree_lxml.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + from six import text_type + + from collections import OrderedDict +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/genshi.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/genshi.py +index 7483be27d..5de27201c 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/genshi.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/html5lib/html5lib/treewalkers/genshi.py +@@ -1,4 +1,4 @@ +-from __future__ import absolute_import, division, unicode_literals ++ + + from genshi.core import QName + from genshi.core import START, END, XML_NAMESPACE, DOCTYPE, TEXT +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/hyperframe/hyperframe/frame.py 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/hyperframe/hyperframe/frame.py +index 5294768a2..90bfe4549 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/hyperframe/hyperframe/frame.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/hyperframe/hyperframe/frame.py +@@ -407,7 +407,7 @@ class SettingsFrame(Frame): + + def serialize_body(self): + return b''.join([_STRUCT_HL.pack(setting & 0xFF, value) +- for setting, value in self.settings.items()]) ++ for setting, value in list(self.settings.items())]) + + def parse_body(self, data): + body_len = 0 +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/six/six.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/six/six.py +index 83f69783d..2ce396486 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/six/six.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/six/six.py +@@ -20,7 +20,7 @@ + + """Utilities for writing code that runs on Python 2 and 3""" + +-from __future__ import absolute_import ++ + + import functools + import itertools +@@ -46,10 +46,10 @@ if PY3: + + MAXSIZE = sys.maxsize + else: +- string_types = basestring, +- integer_types = (int, long) +- class_types = (type, types.ClassType) +- text_type = unicode ++ string_types = str, ++ integer_types = (int, int) ++ class_types = (type, type) ++ text_type = str + binary_type = str + + if sys.platform.startswith("java"): +@@ -529,7 +529,7 @@ try: + advance_iterator = next + except NameError: + def advance_iterator(it): +- return it.next() ++ return it.__next__() + next = advance_iterator + + +@@ -552,7 +552,7 @@ if PY3: + Iterator = object + else: + def get_unbound_function(unbound): +- return unbound.im_func 
++ return unbound.__func__ + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) +@@ -562,7 +562,7 @@ else: + + class Iterator(object): + +- def next(self): ++ def __next__(self): + return type(self).__next__(self) + + callable = callable +@@ -629,7 +629,7 @@ if PY3: + + def u(s): + return s +- unichr = chr ++ chr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct +@@ -655,8 +655,8 @@ else: + # Workaround for standalone backslash + + def u(s): +- return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") +- unichr = unichr ++ return str(s.replace(r'\\', r'\\\\'), "unicode_escape") ++ chr = chr + int2byte = chr + + def byte2int(bs): +@@ -665,8 +665,8 @@ else: + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) +- import StringIO +- StringIO = BytesIO = StringIO.StringIO ++ import io ++ StringIO = BytesIO = io.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +@@ -747,11 +747,11 @@ if print_ is None: + return + + def write(data): +- if not isinstance(data, basestring): ++ if not isinstance(data, str): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and +- isinstance(data, unicode) and ++ isinstance(data, str) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: +@@ -761,13 +761,13 @@ if print_ is None: + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: +- if isinstance(sep, unicode): ++ if isinstance(sep, str): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: +- if isinstance(end, unicode): ++ if isinstance(end, str): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") +@@ -775,12 +775,12 @@ if print_ is None: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: +- if isinstance(arg, unicode): ++ if isinstance(arg, str): + want_unicode = True + break + if want_unicode: +- newline = unicode("\n") +- space = unicode(" ") ++ newline = str("\n") ++ space = str(" ") + else: + newline = "\n" + space = " " +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/__init__.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/__init__.py +index d21d697c8..93c990069 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/__init__.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/__init__.py +@@ -12,7 +12,7 @@ + + """ + +-from __future__ import unicode_literals ++ + + import codecs + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/mklabels.py 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/mklabels.py +index 295dc928b..cc1cf2e87 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/mklabels.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/mklabels.py +@@ -12,7 +12,7 @@ + + import json + try: +- from urllib import urlopen ++ from urllib.request import urlopen + except ImportError: + from urllib.request import urlopen + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/tests.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/tests.py +index e12c10d03..0c367905a 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/tests.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/tests.py +@@ -11,7 +11,7 @@ + + """ + +-from __future__ import unicode_literals ++ + + from . 
import (lookup, LABELS, decode, encode, iter_decode, iter_encode, + IncrementalDecoder, IncrementalEncoder, UTF8) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/x_user_defined.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/x_user_defined.py +index d16e32602..45037bb49 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/x_user_defined.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/webencodings/webencodings/x_user_defined.py +@@ -11,7 +11,7 @@ + + """ + +-from __future__ import unicode_literals ++ + + import codecs + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/browser.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/browser.py +index 35f60d162..0097214cb 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/browser.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/browser.py +@@ -47,9 +47,7 @@ def get_taskcluster_artifact(index, path): + return resp + + +-class Browser(object): +- __metaclass__ = ABCMeta +- ++class Browser(object, metaclass=ABCMeta): + def __init__(self, logger): + self.logger = logger + +@@ -800,7 +798,7 @@ class Chrome(Browser): + return m.group(1) + + +-class ChromeAndroidBase(Browser): ++class ChromeAndroidBase(Browser, metaclass=ABCMeta): + """A base class for ChromeAndroid and AndroidWebView. + + On Android, WebView is based on Chromium open source project, and on some +@@ -808,7 +806,6 @@ class ChromeAndroidBase(Browser): + a very similar WPT runner implementation. + Includes webdriver installation. + """ +- __metaclass__ = ABCMeta # This is an abstract class. 
+ + def __init__(self, logger): + super(ChromeAndroidBase, self).__init__(logger) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/install.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/install.py +index 817702254..2ae2c62d8 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/install.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/install.py +@@ -22,7 +22,7 @@ channel_by_name = { + } + + channel_args = argparse.ArgumentParser(add_help=False) +-channel_args.add_argument('--channel', choices=channel_by_name.keys(), ++channel_args.add_argument('--channel', choices=list(channel_by_name.keys()), + default='nightly', action='store', + help=''' + Name of browser release channel (default: nightly). "stable" and "release" are +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/markdown.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/markdown.py +index 43020cdaf..542bff7d9 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/markdown.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/markdown.py +@@ -17,17 +17,17 @@ def format_comment_title(product): + + def markdown_adjust(s): + """Escape problematic markdown sequences.""" +- s = s.replace('\t', u'\\t') +- s = s.replace('\n', u'\\n') +- s = s.replace('\r', u'\\r') +- s = s.replace('`', u'') +- s = s.replace('|', u'\\|') ++ s = s.replace('\t', '\\t') ++ s = s.replace('\n', '\\n') ++ s = s.replace('\r', '\\r') ++ s = s.replace('`', '') ++ s = s.replace('|', '\\|') + return s + + + def table(headings, data, log): + """Create and log data to specified logger in tabular format.""" +- cols = range(len(headings)) ++ cols = list(range(len(headings))) + assert all(len(item) == 
len(cols) for item in data) + max_widths = reduce(lambda prev, cur: [(len(cur[i]) + 2) + if (len(cur[i]) + 2) > prev[i] +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/revlist.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/revlist.py +index bd85612e2..c88cdac2c 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/revlist.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/revlist.py +@@ -40,15 +40,15 @@ def get_tagged_revisions(pattern): + git = get_git_cmd(wpt_root) + args = [ + pattern, +- u'--sort=-committerdate', +- u'--format=%(refname:lstrip=2) %(objectname) %(committerdate:raw)', +- u'--count=100000' ++ '--sort=-committerdate', ++ '--format=%(refname:lstrip=2) %(objectname) %(committerdate:raw)', ++ '--count=100000' + ] +- ref_list = git(u"for-each-ref", *args) ++ ref_list = git("for-each-ref", *args) + for line in ref_list.splitlines(): + if not line: + continue +- tag, commit, date, _ = line.split(u" ") ++ tag, commit, date, _ = line.split(" ") + date = int(date) + yield tag, commit, date + +@@ -84,7 +84,7 @@ def get_epoch_revisions(epoch, until, max_count): + # Expected result: N,M,K,J,H,G,F,C,A + + cutoff_date = calculate_cutoff_date(until, epoch, epoch_offset) +- for _, commit, date in get_tagged_revisions(u"refs/tags/merge_pr_*"): ++ for _, commit, date in get_tagged_revisions("refs/tags/merge_pr_*"): + if count >= max_count: + return + if date < cutoff_date: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/testfiles.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/testfiles.py +index 7f966a625..9315ac032 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/testfiles.py ++++ 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/testfiles.py +@@ -124,17 +124,17 @@ def branch_point(): + + def compile_ignore_rule(rule): + # type: (Text) -> Pattern[Text] +- rule = rule.replace(ensure_text(os.path.sep), u"/") +- parts = rule.split(u"/") ++ rule = rule.replace(ensure_text(os.path.sep), "/") ++ parts = rule.split("/") + re_parts = [] + for part in parts: +- if part.endswith(u"**"): +- re_parts.append(re.escape(part[:-2]) + u".*") +- elif part.endswith(u"*"): +- re_parts.append(re.escape(part[:-1]) + u"[^/]*") ++ if part.endswith("**"): ++ re_parts.append(re.escape(part[:-2]) + ".*") ++ elif part.endswith("*"): ++ re_parts.append(re.escape(part[:-1]) + "[^/]*") + else: + re_parts.append(re.escape(part)) +- return re.compile(u"^%s$" % u"/".join(re_parts)) ++ return re.compile("^%s$" % "/".join(re_parts)) + + + def repo_files_changed(revish, include_uncommitted=False, include_new=False): +@@ -143,7 +143,7 @@ def repo_files_changed(revish, include_uncommitted=False, include_new=False): + if git is None: + raise Exception("git not found") + +- files_list = git("diff", "--name-only", "-z", revish).split(u"\0") ++ files_list = git("diff", "--name-only", "-z", revish).split("\0") + assert not files_list[-1] + files = set(files_list[:-1]) + +@@ -218,7 +218,7 @@ def _in_repo_root(full_path): + def load_manifest(manifest_path=None, manifest_update=True): + # type: (Optional[Text], bool) -> manifest.Manifest + if manifest_path is None: +- manifest_path = os.path.join(wpt_root, u"MANIFEST.json") ++ manifest_path = os.path.join(wpt_root, "MANIFEST.json") + return manifest.load_and_update(wpt_root, manifest_path, "/", + update=manifest_update) + +@@ -231,7 +231,7 @@ def affected_testfiles(files_changed, # type: Iterable[Text] + # type: (...) 
-> Tuple[Set[Text], Set[Text]] + """Determine and return list of test files that reference changed files.""" + if skip_dirs is None: +- skip_dirs = {u"conformance-checkers", u"docs", u"tools"} ++ skip_dirs = {"conformance-checkers", "docs", "tools"} + affected_testfiles = set() + # Exclude files that are in the repo root, because + # they are not part of any test. +@@ -370,7 +370,7 @@ def get_revish(**kwargs): + # type: (**Any) -> Text + revish = kwargs.get("revish") + if revish is None: +- revish = u"%s..HEAD" % branch_point() ++ revish = "%s..HEAD" % branch_point() + return ensure_text(revish).strip() + + +@@ -382,7 +382,7 @@ def run_changed_files(**kwargs): + include_uncommitted=kwargs["modified"], + include_new=kwargs["new"]) + +- separator = u"\0" if kwargs["null"] else u"\n" ++ separator = "\0" if kwargs["null"] else "\n" + + for item in sorted(changed): + line = os.path.relpath(item, wpt_root) + separator +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/config.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/config.py +index 4d653f522..299e52d1b 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/config.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/config.py +@@ -302,7 +302,7 @@ class ConfigBuilder(object): + + rv = {} + for name, host in iteritems(hosts): +- rv[name] = {subdomain: (subdomain.encode("idna").decode("ascii") + u"." + host) ++ rv[name] = {subdomain: (subdomain.encode("idna").decode("ascii") + "." + host) + for subdomain in data["subdomains"]} + rv[name][""] = host + return rv +@@ -314,7 +314,7 @@ class ConfigBuilder(object): + + rv = {} + for name, host in iteritems(hosts): +- rv[name] = {subdomain: (subdomain.encode("idna").decode("ascii") + u"." 
+ host) ++ rv[name] = {subdomain: (subdomain.encode("idna").decode("ascii") + "." + host) + for subdomain in data["not_subdomains"]} + return rv + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/request.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/request.py +index dbfe067ba..e3350f394 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/request.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/request.py +@@ -378,7 +378,7 @@ class RequestHeaders(dict): + (i.e. names of headers) and values have binary type. + """ + def __init__(self, items): +- for header in items.keys(): ++ for header in list(items.keys()): + key = isomorphic_encode(header).lower() + # get all headers with the same name + values = items.getallmatchingheaders(header) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/response.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/response.py +index b7bfcebd1..da18d7651 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/response.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/response.py +@@ -179,9 +179,9 @@ class Response(object): + cookie = isomorphic_decode(cookie) + parser.load(cookie) + +- if name in parser.keys(): ++ if name in list(parser.keys()): + del self.headers["Set-Cookie"] +- for m in parser.values(): ++ for m in list(parser.values()): + if m.key != name: + self.headers.append(("Set-Cookie", m.OutputString())) + +@@ -241,7 +241,7 @@ class Response(object): + self.write_status_headers() + self.write_content() + +- def set_error(self, code, message=u""): ++ def set_error(self, 
code, message=""): + """Set the response status headers and return a JSON error object: + + {"error": {"code": code, "message": message}} +@@ -413,9 +413,9 @@ class H2Response(Response): + item = None + item_iter = self.iter_content() + try: +- item = item_iter.next() ++ item = next(item_iter) + while True: +- check_last = item_iter.next() ++ check_last = next(item_iter) + self.writer.write_data(item, last=False) + item = check_last + except StopIteration: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/server.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/server.py +index d6718df3f..b4f4b42bf 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/server.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/server.py +@@ -382,7 +382,7 @@ class Http2WebTestRequestHandler(BaseWebTestRequestHandler): + self.close_connection = True + + # Flood all the streams with connection terminated, this will cause them to stop +- for stream_id, (thread, queue) in stream_queues.items(): ++ for stream_id, (thread, queue) in list(stream_queues.items()): + queue.put(frame) + + elif hasattr(frame, 'stream_id'): +@@ -398,12 +398,12 @@ class Http2WebTestRequestHandler(BaseWebTestRequestHandler): + self.logger.error('(%s) Closing Connection - \n%s' % (self.uid, str(e))) + if not self.close_connection: + self.close_connection = True +- for stream_id, (thread, queue) in stream_queues.items(): ++ for stream_id, (thread, queue) in list(stream_queues.items()): + queue.put(None) + except Exception as e: + self.logger.error('(%s) Unexpected Error - \n%s' % (self.uid, str(e))) + finally: +- for stream_id, (thread, queue) in stream_queues.items(): ++ for stream_id, (thread, queue) in list(stream_queues.items()): + thread.join() + + def start_stream_thread(self, frame, 
queue): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/utils.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/utils.py +index b005b417d..79e68e596 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/utils.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/utils.py +@@ -42,7 +42,7 @@ def isomorphic_encode(s): + + def invert_dict(dict): + rv = {} +- for key, values in dict.items(): ++ for key, values in list(dict.items()): + for value in values: + if value in rv: + raise ValueError +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/wptserve.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/wptserve.py +index 816c8a5a6..3704f8b3c 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/wptserve.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wptserve/wptserve/wptserve.py +@@ -2,7 +2,7 @@ + import argparse + import os + +-import server ++from . 
import server + + def abs_path(path): + return os.path.abspath(path) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/abstract_local_server_command.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/abstract_local_server_command.py +index 5f0afff1c..7a2d4bbc8 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/abstract_local_server_command.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/abstract_local_server_command.py +@@ -57,8 +57,8 @@ class AbstractLocalServerCommand(Command): + + server_url = 'http://localhost:%d%s' % (options.httpd_port, + self.launch_path) +- print 'Starting server at %s' % server_url +- print "Use the 'Exit' link in the UI, %squitquitquit or Ctrl-C to stop" % server_url ++ print('Starting server at %s' % server_url) ++ print("Use the 'Exit' link in the UI, %squitquitquit or Ctrl-C to stop" % server_url) + + if options.show_results: + # FIXME: This seems racy. +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/analyze_baselines.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/analyze_baselines.py +index 96d069b7a..897dbecb8 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/analyze_baselines.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/analyze_baselines.py +@@ -56,7 +56,7 @@ class AnalyzeBaselines(AbstractRebaseliningCommand): + self._tool = None + + def _write(self, msg): +- print msg ++ print(msg) + + def _analyze_baseline(self, options, test_name): + # TODO(robertma): Investigate changing the CLI to take extensions with leading '.'. 
+diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/flaky_tests.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/flaky_tests.py +index 4905ff111..751ea571b 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/flaky_tests.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/flaky_tests.py +@@ -121,5 +121,5 @@ class FlakyTests(Command): + ','.join(test_names) + expectations_string = '\n'.join(line.to_string() for line in lines) + +- print self.OUTPUT % (self.HEADER, expectations_string, +- flakiness_dashboard_url) ++ print(self.OUTPUT % (self.HEADER, expectations_string, ++ flakiness_dashboard_url)) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/help_command.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/help_command.py +index c711e7565..e7f7a361f 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/help_command.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/help_command.py +@@ -58,8 +58,8 @@ class HelpCommand(Command): + relevant_commands = self._tool.commands[:] + else: + epilog = 'Common %prog commands:\n' +- relevant_commands = filter(self._tool.should_show_in_main_help, +- self._tool.commands) ++ relevant_commands = list(filter(self._tool.should_show_in_main_help, ++ self._tool.commands)) + longest_name_length = max( + len(command.name) for command in relevant_commands) + relevant_commands.sort(lambda a, b: cmp(a.name, b.name)) +@@ -83,7 +83,7 @@ class HelpCommand(Command): + if args: + command = self._tool.command_by_name(args[0]) + if command: +- print command.standalone_help() ++ print(command.standalone_help()) + return 0 + + self.show_all_commands = options.show_all_commands +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/pretty_diff.py 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/pretty_diff.py +index 12516921f..e154fd861 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/pretty_diff.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/pretty_diff.py +@@ -30,7 +30,7 @@ import logging + import optparse + import sys + import tempfile +-import urllib ++import urllib.request, urllib.parse, urllib.error + + from blinkpy.common.pretty_diff import prettify_diff + from blinkpy.common.system.executive import ScriptError +@@ -101,5 +101,5 @@ class PrettyDiff(Command): + return diff_file + + def _open_pretty_diff(self, file_path): +- url = 'file://%s' % urllib.quote(file_path) ++ url = 'file://%s' % urllib.parse.quote(file_path) + self._tool.user.open_url(url) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/queries.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/queries.py +index 465099ebf..90c16ff9a 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/queries.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/queries.py +@@ -53,7 +53,7 @@ and PID and prints it to stdout.""" + pid = None + if len(args) > 1: + pid = int(args[1]) +- print crash_logs.find_newest_log(args[0], pid) ++ print(crash_logs.find_newest_log(args[0], pid)) + + + class PrintExpectations(Command): +@@ -103,7 +103,7 @@ class PrintExpectations(Command): + + def execute(self, options, args, tool): + if not options.paths and not args and not options.all: +- print 'You must either specify one or more test paths or --all.' 
++ print('You must either specify one or more test paths or --all.') + return + + if options.platform: +@@ -114,7 +114,7 @@ class PrintExpectations(Command): + if default_port: + port_names = [default_port.name()] + else: +- print "No port names match '%s'" % options.platform ++ print("No port names match '%s'" % options.platform) + return + else: + default_port = tool.port_factory.get(port_names[0]) +@@ -129,7 +129,7 @@ class PrintExpectations(Command): + if file.startswith(web_tests_dir): + file = file.replace(web_tests_dir, + WEB_TESTS_LAST_COMPONENT) +- print file ++ print(file) + return + + tests = set(default_port.tests(args)) +@@ -143,8 +143,8 @@ class PrintExpectations(Command): + for test in sorted(tests_to_print) + ] + if port_name != port_names[0]: +- print +- print '\n'.join(self._format_lines(options, port_name, lines)) ++ print() ++ print('\n'.join(self._format_lines(options, port_name, lines))) + + @staticmethod + def _test_set_for_keyword(keyword, test_expectations, tests): +@@ -220,7 +220,7 @@ class PrintBaselines(Command): + + def execute(self, options, args, tool): + if not args and not options.all: +- print 'You must either specify one or more test paths or --all.' 
++ print('You must either specify one or more test paths or --all.') + return + + default_port = tool.port_factory.get() +@@ -228,7 +228,7 @@ class PrintBaselines(Command): + port_names = fnmatch.filter(tool.port_factory.all_port_names(), + options.platform) + if not port_names: +- print "No port names match '%s'" % options.platform ++ print("No port names match '%s'" % options.platform) + else: + port_names = [default_port.name()] + +@@ -239,9 +239,9 @@ class PrintBaselines(Command): + + for port_name in port_names: + if port_name != port_names[0]: +- print ++ print() + if not options.csv: +- print '// For %s' % port_name ++ print('// For %s' % port_name) + port = tool.port_factory.get(port_name) + for test_name in tests: + self._print_baselines( +@@ -253,13 +253,13 @@ class PrintBaselines(Command): + baseline_location = baselines[extension] + if baseline_location: + if options.csv: +- print '%s,%s,%s,%s,%s,%s' % ( ++ print('%s,%s,%s,%s,%s,%s' % ( + port_name, test_name, + self._platform_for_path(test_name), extension[1:], + baseline_location, +- self._platform_for_path(baseline_location)) ++ self._platform_for_path(baseline_location))) + else: +- print baseline_location ++ print(baseline_location) + + def _platform_for_path(self, relpath): + platform_matchobj = self._platform_regexp.match(relpath) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/rebaseline.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/rebaseline.py +index 389901da2..4d114fca8 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/rebaseline.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/rebaseline.py +@@ -183,7 +183,7 @@ class TestBaselineSet(object): + + def _iter_combinations(self): + """Iterates through (test, build, port) combinations.""" +- for test_prefix, build_port_pairs in self._test_prefix_map.iteritems(): ++ for test_prefix, build_port_pairs in 
self._test_prefix_map.items(): + for test in self._port.tests([test_prefix]): + for build, port_name in build_port_pairs: + yield (test, build, port_name) +@@ -276,7 +276,7 @@ class AbstractParallelRebaselineCommand(AbstractRebaseliningCommand): + for builder in list(release_builders) + list(debug_builders): + port = self._tool.port_factory.get_from_builder_name(builder) + fallback_path = port.baseline_search_path() +- if fallback_path not in builders_to_fallback_paths.values(): ++ if fallback_path not in list(builders_to_fallback_paths.values()): + builders_to_fallback_paths[builder] = fallback_path + + return set(builders_to_fallback_paths) +@@ -352,7 +352,7 @@ class AbstractParallelRebaselineCommand(AbstractRebaseliningCommand): + change_set = ChangeSet() + for _, stdout, _ in command_results: + updated = False +- for line in filter(None, stdout.splitlines()): ++ for line in [_f for _f in stdout.splitlines() if _f]: + try: + parsed_line = json.loads(line) + change_set.update(ChangeSet.from_dict(parsed_line)) +@@ -376,7 +376,7 @@ class AbstractParallelRebaselineCommand(AbstractRebaseliningCommand): + self._suffixes_for_actual_failures(test, build)) + + optimize_commands = [] +- for test, suffixes in tests_to_suffixes.iteritems(): ++ for test, suffixes in tests_to_suffixes.items(): + # No need to optimize baselines for a test with no failures. + if not suffixes: + continue +@@ -397,7 +397,7 @@ class AbstractParallelRebaselineCommand(AbstractRebaseliningCommand): + return optimize_commands + + def _update_expectations_files(self, lines_to_remove): +- tests = lines_to_remove.keys() ++ tests = list(lines_to_remove.keys()) + to_remove = defaultdict(set) + all_versions = frozenset([ + config.version.lower() for config in self._tool.port_factory.get(). 
+@@ -416,7 +416,7 @@ class AbstractParallelRebaselineCommand(AbstractRebaseliningCommand): + port.test_configuration().version.lower()) + + # Get configurations to remove based on builders for each test +- for test, port_names in lines_to_remove.items(): ++ for test, port_names in list(lines_to_remove.items()): + for port_name in port_names: + port = self._tool.port_factory.get(port_name) + if port.test_configuration().version.lower() in all_versions: +@@ -430,7 +430,7 @@ class AbstractParallelRebaselineCommand(AbstractRebaseliningCommand): + path: self._tool.filesystem.read_text_file(path) + }) + system_remover = SystemConfigurationRemover(test_expectations) +- for test, versions in to_remove.items(): ++ for test, versions in list(to_remove.items()): + system_remover.remove_os_versions(test, versions) + system_remover.update_expectations() + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/rebaseline_cl.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/rebaseline_cl.py +index 2a2075a90..90454a251 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/rebaseline_cl.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/rebaseline_cl.py +@@ -201,7 +201,7 @@ class RebaselineCL(AbstractParallelRebaselineCommand): + Args: + jobs: A dict mapping Build objects to TryJobStatus objects. 
+ """ +- finished_jobs = {b for b, s in jobs.items() if s.status == 'COMPLETED'} ++ finished_jobs = {b for b, s in list(jobs.items()) if s.status == 'COMPLETED'} + if self.selected_try_bots.issubset( + {b.builder_name + for b in finished_jobs}): +@@ -240,7 +240,7 @@ class RebaselineCL(AbstractParallelRebaselineCommand): + """ + results_fetcher = self._tool.results_fetcher + results = {} +- for build, status in jobs.iteritems(): ++ for build, status in jobs.items(): + if status == TryJobStatus('COMPLETED', 'SUCCESS'): + # Builds with passing try jobs are mapped to None, to indicate + # that there are no baselines to download. +@@ -310,7 +310,7 @@ class RebaselineCL(AbstractParallelRebaselineCommand): + A TestBaselineSet object. + """ + builds_to_tests = {} +- for build, results in builds_to_results.iteritems(): ++ for build, results in builds_to_results.items(): + builds_to_tests[build] = self._tests_to_rebaseline(build, results) + if only_changed_tests: + files_in_cl = self._tool.git().changed_files(diff_filter='AM') +@@ -323,7 +323,7 @@ class RebaselineCL(AbstractParallelRebaselineCommand): + ] + + test_baseline_set = TestBaselineSet(self._tool) +- for build, tests in builds_to_tests.iteritems(): ++ for build, tests in builds_to_tests.items(): + for test in tests: + if only_changed_tests and test not in tests_in_cl: + continue +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/rebaseline_server.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/rebaseline_server.py +index c96ba176f..a708ae2e2 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/rebaseline_server.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/tool/commands/rebaseline_server.py +@@ -83,7 +83,7 @@ class RebaselineServer(AbstractLocalServerCommand): + results_directory = args[0] + host = Host() + +- print 'Parsing full_results.json...' 
++ print('Parsing full_results.json...') + results_json_path = host.filesystem.join(results_directory, + 'full_results.json') + results_json = json_results_generator.load_json( +@@ -96,7 +96,7 @@ class RebaselineServer(AbstractLocalServerCommand): + self._test_config = TestConfig(port, web_tests_directory, + results_directory, platforms, host) + +- print 'Gathering current baselines...' ++ print('Gathering current baselines...') + self._gather_baselines(results_json) + + return { +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/android_wpt_expectations_updater.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/android_wpt_expectations_updater.py +index 752346c8a..1619afffa 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/android_wpt_expectations_updater.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/android_wpt_expectations_updater.py +@@ -28,6 +28,7 @@ from blinkpy.web_tests.models.typ_types import Expectation, ResultType + from blinkpy.web_tests.port.android import ( + PRODUCTS, PRODUCTS_TO_STEPNAMES, PRODUCTS_TO_BROWSER_TAGS, + PRODUCTS_TO_EXPECTATION_FILE_PATHS, ANDROID_DISABLED_TESTS) ++from functools import reduce + + _log = logging.getLogger(__name__) + +@@ -50,7 +51,7 @@ class AndroidWPTExpectationsUpdater(WPTExpectationsUpdater): + # We need to put all the Android expectation files in + # the _test_expectations member variable so that the + # files get cleaned in cleanup_test_expectations_files() +- return (PRODUCTS_TO_EXPECTATION_FILE_PATHS.values() + ++ return (list(PRODUCTS_TO_EXPECTATION_FILE_PATHS.values()) + + [ANDROID_DISABLED_TESTS]) + + def _get_web_test_results(self, build): +@@ -72,7 +73,7 @@ class AndroidWPTExpectationsUpdater(WPTExpectationsUpdater): + step_name = PRODUCTS_TO_STEPNAMES[product] + results_sets.append(self.host.results_fetcher.fetch_results( + build, True, '%s (with patch)' % step_name)) +- return filter(None, results_sets) ++ return [_f for _f in 
results_sets if _f] + + def get_builder_configs(self, build, results_set=None): + """Gets step name from WebTestResults instance and uses +@@ -99,7 +100,7 @@ class AndroidWPTExpectationsUpdater(WPTExpectationsUpdater): + else: + step_name = results_set.step_name() + step_name = step_name[: step_name.index(' (with patch)')] +- product = {s: p for p, s in PRODUCTS_TO_STEPNAMES.items()}[step_name] ++ product = {s: p for p, s in list(PRODUCTS_TO_STEPNAMES.items())}[step_name] + products = {product} + + for product in products: +@@ -168,7 +169,7 @@ class AndroidWPTExpectationsUpdater(WPTExpectationsUpdater): + """ + browser_to_exp_path = { + browser: PRODUCTS_TO_EXPECTATION_FILE_PATHS[product] +- for product, browser in PRODUCTS_TO_BROWSER_TAGS.items()} ++ for product, browser in list(PRODUCTS_TO_BROWSER_TAGS.items())} + product_exp_paths = {PRODUCTS_TO_EXPECTATION_FILE_PATHS[prod] + for prod in self.options.android_product} + untriaged_exps = self._get_untriaged_test_expectations( +@@ -177,18 +178,18 @@ class AndroidWPTExpectationsUpdater(WPTExpectationsUpdater): + self._never_fix_expectations, [ANDROID_DISABLED_TESTS], + self.NEVER_FIX_MARKER_COMMENT)[ANDROID_DISABLED_TESTS] + +- for path, test_exps in untriaged_exps.items(): ++ for path, test_exps in list(untriaged_exps.items()): + self._test_expectations.remove_expectations( +- path, reduce(lambda x, y: x + y, test_exps.values())) ++ path, reduce(lambda x, y: x + y, list(test_exps.values()))) + + if neverfix_tests: + self._never_fix_expectations.remove_expectations( + ANDROID_DISABLED_TESTS, +- reduce(lambda x, y: x + y, neverfix_tests.values())) ++ reduce(lambda x, y: x + y, list(neverfix_tests.values()))) + +- for results_test_name, platform_results in test_to_results.items(): ++ for results_test_name, platform_results in list(test_to_results.items()): + exps_test_name = 'external/wpt/%s' % results_test_name +- for configs, test_results in platform_results.items(): ++ for configs, test_results in 
list(platform_results.items()): + for config in configs: + path = browser_to_exp_path[config.browser] + neverfix_exp = self._maybe_create_never_fix_expectation( +@@ -221,7 +222,7 @@ class AndroidWPTExpectationsUpdater(WPTExpectationsUpdater): + self._test_expectations, path, self.MARKER_COMMENT) + self._test_expectations.add_expectations( + path, +- sorted([exps[0] for exps in untriaged_exps[path].values()], ++ sorted([exps[0] for exps in list(untriaged_exps[path].values())], + key=lambda e: e.test), + marker_lineno) + +@@ -233,7 +234,7 @@ class AndroidWPTExpectationsUpdater(WPTExpectationsUpdater): + if neverfix_tests: + self._never_fix_expectations.add_expectations( + ANDROID_DISABLED_TESTS, +- sorted(reduce(lambda x, y: x + y, neverfix_tests.values()), ++ sorted(reduce(lambda x, y: x + y, list(neverfix_tests.values())), + key=lambda e: e.test), + disabled_tests_marker_lineno) + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/android_wpt_expectations_updater_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/android_wpt_expectations_updater_unittest.py +index 5fc3016e7..0c60c6ddc 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/android_wpt_expectations_updater_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/android_wpt_expectations_updater_unittest.py +@@ -77,7 +77,7 @@ class AndroidWPTExpectationsUpdaterTest(LoggingTestCase): + }, + }) + # Write dummy expectations +- for path in PRODUCTS_TO_EXPECTATION_FILE_PATHS.values(): ++ for path in list(PRODUCTS_TO_EXPECTATION_FILE_PATHS.values()): + host.filesystem.write_text_file( + path, self._raw_android_expectations) + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/directory_owners_extractor.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/directory_owners_extractor.py +index c9cf7f617..26bda8077 100644 +--- 
a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/directory_owners_extractor.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/directory_owners_extractor.py +@@ -69,7 +69,7 @@ class DirectoryOwnersExtractor(object): + email_map[tuple(owners)].add(owned_directory_relpath) + return { + owners: sorted(owned_directories) +- for owners, owned_directories in email_map.iteritems() ++ for owners, owned_directories in email_map.items() + } + + def find_owners_file(self, start_path): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/directory_owners_extractor_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/directory_owners_extractor_unittest.py +index 1d94129dc..9e242717b 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/directory_owners_extractor_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/directory_owners_extractor_unittest.py +@@ -30,7 +30,7 @@ class DirectoryOwnersExtractorTest(unittest.TestCase): + def _write_files(self, files): + # Use write_text_file instead of directly assigning to filesystem.files + # so that intermediary directories are correctly created, too. 
+- for path, contents in files.iteritems(): ++ for path, contents in files.items(): + self.host.filesystem.write_text_file(path, contents) + + def test_list_owners_combines_same_owners(self): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/export_notifier.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/export_notifier.py +index 21b67171f..ef8e0d357 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/export_notifier.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/export_notifier.py +@@ -92,7 +92,7 @@ class ExportNotifier(object): + """Processes and comments on CLs with failed Tackcluster checks.""" + _log.info('Processing %d CLs with failed Taskcluster checks.', + len(gerrit_dict)) +- for change_id, pr_status_info in gerrit_dict.items(): ++ for change_id, pr_status_info in list(gerrit_dict.items()): + try: + cl = self.gerrit.query_cl_comments_and_revisions(change_id) + has_commented = self.has_latest_taskcluster_status_commented( +@@ -184,7 +184,7 @@ class PRStatusInfo(object): + + def _checks_results_as_comment(self): + comment = '' +- for check, url in self._checks_results.items(): ++ for check, url in list(self._checks_results.items()): + comment += '\n%s (%s)' % (check, url) + + return comment +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/gerrit.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/gerrit.py +index 2c0397547..c06b82afb 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/gerrit.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/gerrit.py +@@ -5,7 +5,7 @@ + import base64 + import json + import logging +-from urllib2 import HTTPError ++from urllib.error import HTTPError + + from blinkpy.common.net.network_transaction import NetworkTimeout + from blinkpy.w3c.chromium_commit import ChromiumCommit +@@ -166,7 +166,7 @@ class GerritCL(object): + # TODO(robertma): Consolidate with the related part in 
chromium_exportable_commits.py. + + try: +- files = self.current_revision['files'].keys() ++ files = list(self.current_revision['files'].keys()) + except KeyError: + # Empty (deleted) CL is not exportable. + return False +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/import_notifier.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/import_notifier.py +index 7c3232a3f..2249c4be7 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/import_notifier.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/import_notifier.py +@@ -113,7 +113,7 @@ class ImportNotifier(object): + changed baselines. + gerrit_url_with_ps: Gerrit URL of this CL with the patchset number. + """ +- for test_name, changed_baselines in changed_test_baselines.iteritems(): ++ for test_name, changed_baselines in changed_test_baselines.items(): + directory = self.find_owned_directory(test_name) + if not directory: + _log.warning('Cannot find OWNERS of %s', test_name) +@@ -161,7 +161,7 @@ class ImportNotifier(object): + test_expectations: A dictionary mapping names of tests that cannot + be rebaselined to a list of new test expectation lines. 
+ """ +- for test_name, expectation_lines in test_expectations.iteritems(): ++ for test_name, expectation_lines in test_expectations.items(): + directory = self.find_owned_directory(test_name) + if not directory: + _log.warning('Cannot find OWNERS of %s', test_name) +@@ -191,7 +191,7 @@ class ImportNotifier(object): + imported_commits = self.local_wpt.commits_in_range( + wpt_revision_start, wpt_revision_end) + bugs = [] +- for directory, failures in self.new_failures_by_directory.iteritems(): ++ for directory, failures in self.new_failures_by_directory.items(): + summary = '[WPT] New failures introduced in {} by import {}'.format( + directory, gerrit_url) + +@@ -228,7 +228,7 @@ class ImportNotifier(object): + cc, + components, + labels=['Test-WebTest']) +- _log.info(unicode(bug)) ++ _log.info(str(bug)) + + if is_wpt_notify_enabled: + _log.info( +@@ -259,7 +259,7 @@ class ImportNotifier(object): + commit_list = '' + for sha, subject in imported_commits: + # subject is a Unicode string and can contain non-ASCII characters. +- line = u'{}: {}'.format(subject, GITHUB_COMMIT_PREFIX + sha) ++ line = '{}: {}'.format(subject, GITHUB_COMMIT_PREFIX + sha) + if self.local_wpt.is_commit_affecting_directory( + sha, path_from_wpt): + line += ' [affecting this directory]' +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/import_notifier_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/import_notifier_unittest.py +index 1eb532446..fc2cec37c 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/import_notifier_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/import_notifier_unittest.py +@@ -227,7 +227,7 @@ class ImportNotifierTest(unittest.TestCase): + imported_commits = [ + ('SHA1', 'Subject 1'), + # Use non-ASCII chars to really test Unicode handling. 
+- ('SHA2', u'ABC~‾¥≈¤・・•∙·☼★星🌟星★☼·∙•・・¤≈¥‾~XYZ') ++ ('SHA2', 'ABC~‾¥≈¤・・•∙·☼★星🌟星★☼·∙•・・¤≈¥‾~XYZ') + ] + + def _is_commit_affecting_directory(commit, directory): +@@ -239,8 +239,8 @@ class ImportNotifierTest(unittest.TestCase): + self.assertEqual( + self.notifier.format_commit_list( + imported_commits, MOCK_WEB_TESTS + 'external/wpt/foo'), +- u'Subject 1: https://github.com/web-platform-tests/wpt/commit/SHA1 [affecting this directory]\n' +- u'ABC~‾¥≈¤・・•∙·☼★星🌟星★☼·∙•・・¤≈¥‾~XYZ: https://github.com/web-platform-tests/wpt/commit/SHA2\n' ++ 'Subject 1: https://github.com/web-platform-tests/wpt/commit/SHA1 [affecting this directory]\n' ++ 'ABC~‾¥≈¤・・•∙·☼★星🌟星★☼·∙•・・¤≈¥‾~XYZ: https://github.com/web-platform-tests/wpt/commit/SHA2\n' + ) + + def test_find_owned_directory_non_virtual(self): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/monorail.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/monorail.py +index a0a5ec85e..40bbf69d6 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/monorail.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/monorail.py +@@ -36,7 +36,7 @@ class MonorailIssue(object): + for field in self._STRING_LIST_FIELDS: + if field in self._body: + # Not a str or unicode. +- assert not isinstance(self._body[field], basestring) ++ assert not isinstance(self._body[field], str) + # Is iterable (TypeError would be raised otherwise). + self._body[field] = list(self._body[field]) + # We expect a KeyError to be raised if 'status' is missing. +@@ -46,19 +46,19 @@ class MonorailIssue(object): + assert self._body['summary'], 'summary cannot be empty.' 
+ + def __unicode__(self): +- result = (u'Monorail issue in project {}\n' ++ result = ('Monorail issue in project {}\n' + 'Summary: {}\n' + 'Status: {}\n').format(self.project_id, self.body['summary'], + self.body['status']) + if 'cc' in self.body: +- result += u'CC: {}\n'.format(', '.join(self.body['cc'])) ++ result += 'CC: {}\n'.format(', '.join(self.body['cc'])) + if 'components' in self.body: +- result += u'Components: {}\n'.format(', '.join( ++ result += 'Components: {}\n'.format(', '.join( + self.body['components'])) + if 'labels' in self.body: +- result += u'Labels: {}\n'.format(', '.join(self.body['labels'])) ++ result += 'Labels: {}\n'.format(', '.join(self.body['labels'])) + if 'description' in self.body: +- result += u'Description:\n{}\n'.format(self.body['description']) ++ result += 'Description:\n{}\n'.format(self.body['description']) + return result + + @property +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/monorail_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/monorail_unittest.py +index db95a7952..7bd7d92d0 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/monorail_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/monorail_unittest.py +@@ -29,22 +29,22 @@ class MonorailIssueTest(unittest.TestCase): + def test_unicode(self): + issue = MonorailIssue( + 'chromium', +- summary=u'test', ++ summary='test', + status='Untriaged', +- description=u'ABC~‾¥≈¤・・•∙·☼★星🌟星★☼·∙•・・¤≈¥‾~XYZ', ++ description='ABC~‾¥≈¤・・•∙·☼★星🌟星★☼·∙•・・¤≈¥‾~XYZ', + cc=['foo@chromium.org', 'bar@chromium.org'], + labels=['Flaky'], + components=['Infra']) +- self.assertEqual(type(unicode(issue)), unicode) ++ self.assertEqual(type(str(issue)), str) + self.assertEqual( +- unicode(issue), +- (u'Monorail issue in project chromium\n' +- u'Summary: test\n' +- u'Status: Untriaged\n' +- u'CC: foo@chromium.org, bar@chromium.org\n' +- u'Components: Infra\n' +- u'Labels: Flaky\n' +- 
u'Description:\nABC~‾¥≈¤・・•∙·☼★星🌟星★☼·∙•・・¤≈¥‾~XYZ\n')) ++ str(issue), ++ ('Monorail issue in project chromium\n' ++ 'Summary: test\n' ++ 'Status: Untriaged\n' ++ 'CC: foo@chromium.org, bar@chromium.org\n' ++ 'Components: Infra\n' ++ 'Labels: Flaky\n' ++ 'Description:\nABC~‾¥≈¤・・•∙·☼★星🌟星★☼·∙•・・¤≈¥‾~XYZ\n')) + + def test_init_unknown_fields(self): + with self.assertRaises(AssertionError): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/test_importer_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/test_importer_unittest.py +index c1e691c4d..71bfe4c99 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/test_importer_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/test_importer_unittest.py +@@ -30,7 +30,7 @@ class TestImporterTest(LoggingTestCase): + + def mock_host(self): + host = MockHost() +- for path in PRODUCTS_TO_EXPECTATION_FILE_PATHS.values(): ++ for path in list(PRODUCTS_TO_EXPECTATION_FILE_PATHS.values()): + host.filesystem.write_text_file(path, '') + return host + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_expectations_updater.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_expectations_updater.py +index 91efdea59..31dfb076a 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_expectations_updater.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_expectations_updater.py +@@ -71,8 +71,8 @@ class WPTExpectationsUpdater(object): + for tests that were renamed. Also the files may have their expectations + updated using builder results. + """ +- return (self.port.all_expectations_dict().keys() + +- PRODUCTS_TO_EXPECTATION_FILE_PATHS.values()) ++ return (list(self.port.all_expectations_dict().keys()) + ++ list(PRODUCTS_TO_EXPECTATION_FILE_PATHS.values())) + + def run(self): + """Does required setup before calling update_expectations(). 
+@@ -162,7 +162,7 @@ class WPTExpectationsUpdater(object): + + # Here we build up a dict of failing test results for all platforms. + test_expectations = {} +- for build, job_status in build_to_status.iteritems(): ++ for build, job_status in build_to_status.items(): + if (job_status.result == 'SUCCESS' and + not self.options.include_unexpected_pass): + continue +@@ -179,7 +179,7 @@ class WPTExpectationsUpdater(object): + # } + # } + # And then we merge results for different platforms that had the same results. +- for test_name, platform_result in test_expectations.iteritems(): ++ for test_name, platform_result in test_expectations.items(): + # platform_result is a dict mapping platforms to results. + test_expectations[test_name] = self.merge_same_valued_keys( + platform_result) +@@ -239,7 +239,7 @@ class WPTExpectationsUpdater(object): + self.host.results_fetcher.fetch_webdriver_test_results( + build, master)) + +- test_results_list = filter(None, test_results_list) ++ test_results_list = [_f for _f in test_results_list if _f] + if not test_results_list: + _log.warning('No results for build %s', build) + self.configs_with_no_results.extend(self.get_builder_configs(build)) +@@ -475,13 +475,13 @@ class WPTExpectationsUpdater(object): + (each SimpleTestResult turns into a line). 
+ """ + line_dict = defaultdict(list) +- for test_name, test_results in sorted(merged_results.iteritems()): ++ for test_name, test_results in sorted(merged_results.items()): + if not self._is_wpt_test(test_name): + _log.warning( + 'Non-WPT test "%s" unexpectedly passed to create_line_dict.', + test_name) + continue +- for configs, result in sorted(test_results.iteritems()): ++ for configs, result in sorted(test_results.items()): + line_dict[test_name].extend( + self._create_lines(test_name, configs, result)) + return line_dict +@@ -604,7 +604,7 @@ class WPTExpectationsUpdater(object): + """ + specifiers = {s.lower() for s in specifiers} + covered_by_try_bots = self._platform_specifiers_covered_by_try_bots() +- for macro, versions in specifier_macros.iteritems(): ++ for macro, versions in specifier_macros.items(): + macro = macro.lower() + + # Only consider version specifiers that have corresponding try bots. +@@ -656,7 +656,7 @@ class WPTExpectationsUpdater(object): + line_list = [] + wont_fix_list = [] + webdriver_list = [] +- for lines in line_dict.itervalues(): ++ for lines in line_dict.values(): + for line in lines: + if 'Skip' in line and '-manual.' 
in line: + wont_fix_list.append(line) +@@ -669,7 +669,7 @@ class WPTExpectationsUpdater(object): + self.port.path_to_generic_test_expectations_file(): line_list, + self.port.path_to_webdriver_expectations_file(): webdriver_list + } +- for expectations_file_path, lines in list_to_expectation.iteritems(): ++ for expectations_file_path, lines in list_to_expectation.items(): + if not lines: + continue + +@@ -926,7 +926,7 @@ class WPTExpectationsUpdater(object): + new_test_results = copy.deepcopy(test_results) + tests_to_rebaseline = set() + for test_name in test_results: +- for platforms, result in test_results[test_name].iteritems(): ++ for platforms, result in test_results[test_name].items(): + if self.can_rebaseline(test_name, result): + del new_test_results[test_name][platforms] + tests_to_rebaseline.add(test_name) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_expectations_updater_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_expectations_updater_unittest.py +index fa8964d9d..48f4dc319 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_expectations_updater_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_expectations_updater_unittest.py +@@ -91,7 +91,7 @@ class WPTExpectationsUpdaterTest(LoggingTestCase): + }, + })) + +- for path in PRODUCTS_TO_EXPECTATION_FILE_PATHS.values(): ++ for path in list(PRODUCTS_TO_EXPECTATION_FILE_PATHS.values()): + host.filesystem.write_text_file(path, '') + return host + +@@ -1237,7 +1237,7 @@ class WPTExpectationsUpdaterTest(LoggingTestCase): + host.filesystem.files[MOCK_WEB_TESTS + 'new/b.html'] = '' + # TODO(rmhasan): Remove creation of Android files within + # tests. 
+- for path in PRODUCTS_TO_EXPECTATION_FILE_PATHS.values(): ++ for path in list(PRODUCTS_TO_EXPECTATION_FILE_PATHS.values()): + host.filesystem.write_text_file(path, '') + + updater = WPTExpectationsUpdater(host) +@@ -1282,7 +1282,7 @@ class WPTExpectationsUpdaterTest(LoggingTestCase): + + # TODO(rmhasan): Remove creation of Android files within + # tests. +- for path in PRODUCTS_TO_EXPECTATION_FILE_PATHS.values(): ++ for path in list(PRODUCTS_TO_EXPECTATION_FILE_PATHS.values()): + host.filesystem.write_text_file(path, '') + + updater = WPTExpectationsUpdater( +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_github.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_github.py +index 7e5674b51..082b25a22 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_github.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_github.py +@@ -7,7 +7,7 @@ import datetime + import json + import logging + import re +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + from collections import namedtuple + + from blinkpy.common.memoized import memoized +@@ -128,7 +128,7 @@ class WPTGitHub(object): + } + try: + response = self.request(path, method='POST', body=body) +- except urllib2.HTTPError as e: ++ except urllib.error.HTTPError as e: + _log.error(e.reason) + if e.code == 422: + _log.error('Please check if branch already exists; If so, ' +@@ -185,7 +185,7 @@ class WPTGitHub(object): + WPT_GH_ORG, + WPT_GH_REPO_NAME, + number, +- urllib2.quote(label), ++ urllib.parse.quote(label), + ) + response = self.request(path, method='DELETE') + +@@ -373,7 +373,7 @@ class WPTGitHub(object): + else: + raise GitHubError(204, response.status_code, + 'check if PR %d is merged' % pr_number) +- except urllib2.HTTPError as e: ++ except urllib.error.HTTPError as e: + if e.code == 404: + return False + else: +@@ -395,7 +395,7 @@ class WPTGitHub(object): + + try: + response = self.request(path, 
method='PUT', body=body) +- except urllib2.HTTPError as e: ++ except urllib.error.HTTPError as e: + if e.code == 405: + raise MergeError(pr_number) + else: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_manifest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_manifest.py +index 28a1a00cf..88ea3cc02 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_manifest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_manifest.py +@@ -153,7 +153,7 @@ class WPTManifest(object): + for test_type in self.test_types: + if test_type not in items: + continue +- for filename, records in items[test_type].iteritems(): ++ for filename, records in items[test_type].items(): + for item in filter(self._is_not_jsshell, records): + url_for_item = self._get_url_from_item(item) + url_items[url_for_item] = item +@@ -163,7 +163,7 @@ class WPTManifest(object): + @memoized + def all_urls(self): + """Returns a set of the URLs for all items in the manifest.""" +- return frozenset(self.all_url_items().keys()) ++ return frozenset(list(self.all_url_items().keys())) + + def is_test_file(self, path_in_wpt): + """Checks if path_in_wpt is a test file according to the manifest.""" +@@ -329,7 +329,7 @@ class WPTManifest(object): + """ + assert isinstance(node, dict) + +- for k, v in node.items(): ++ for k, v in list(node.items()): + # WPT urls are always joined by '/', even on Windows. 
+ new_path = k if not path else path + '/' + k + +@@ -360,7 +360,7 @@ class WPTManifest(object): + _handle_node(test_type_items, v, new_path) + + new_items = {} +- for test_type, value in items.items(): ++ for test_type, value in list(items.items()): + test_type_items = {} + _handle_node(test_type_items, value, '') + new_items[test_type] = test_type_items +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_manifest_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_manifest_unittest.py +index b9d94f52b..bde5a7e16 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_manifest_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_manifest_unittest.py +@@ -92,7 +92,7 @@ class WPTManifestUnitTest(unittest.TestCase): + manifest = WPTManifest(manifest_json) + self.assertTrue(manifest.is_test_file('test.any.js')) + self.assertEqual(manifest.all_url_items(), +- {u'test.any.html': [u'test.any.html', {}]}) ++ {'test.any.html': ['test.any.html', {}]}) + self.assertEqual(manifest.extract_reference_list('/foo/bar.html'), []) + + def test_all_url_items_skips_jsshell_tests(self): +@@ -113,7 +113,7 @@ class WPTManifestUnitTest(unittest.TestCase): + ''' + manifest = WPTManifest(manifest_json) + self.assertEqual(manifest.all_url_items(), +- {u'test.any.html': [u'test.any.html', {}]}) ++ {'test.any.html': ['test.any.html', {}]}) + + def test_file_for_test(self): + # Test that we can lookup a test's filename for various cases like +@@ -135,8 +135,8 @@ class WPTManifestUnitTest(unittest.TestCase): + manifest = WPTManifest(manifest_json) + self.assertEqual( + manifest.all_url_items(), { +- u'test.any.html': [u'test.any.html', {}], +- u'test.any.worker.html': [u'test.any.worker.html', {}] ++ 'test.any.html': ['test.any.html', {}], ++ 'test.any.worker.html': ['test.any.worker.html', {}] + }) + # Ensure that we can get back to `test.any.js` from both of the tests. 
+ self.assertEqual( +@@ -171,10 +171,10 @@ class WPTManifestUnitTest(unittest.TestCase): + manifest = WPTManifest(manifest_json) + self.assertEqual( + manifest.all_url_items(), { +- u'test.html': [u'test.html', {}], +- u'test-crash.html': [u'test-crash.html', {}] ++ 'test.html': ['test.html', {}], ++ 'test-crash.html': ['test-crash.html', {}] + }) + +- self.assertTrue(manifest.is_crash_test(u'test-crash.html')) +- self.assertFalse(manifest.is_crash_test(u'test.html')) +- self.assertFalse(manifest.is_crash_test(u'different-test-crash.html')) ++ self.assertTrue(manifest.is_crash_test('test-crash.html')) ++ self.assertFalse(manifest.is_crash_test('test.html')) ++ self.assertFalse(manifest.is_crash_test('different-test-crash.html')) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_metadata_builder.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_metadata_builder.py +index cbfd6ceef..649c5e111 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_metadata_builder.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_metadata_builder.py +@@ -124,7 +124,7 @@ class WPTMetadataBuilder(object): + + tests_for_metadata = self.get_tests_needing_metadata() + _log.info("Found %d tests requiring metadata", len(tests_for_metadata)) +- for test_name, test_status_bitmap in tests_for_metadata.items(): ++ for test_name, test_status_bitmap in list(tests_for_metadata.items()): + filename, file_contents = self.get_metadata_filename_and_contents( + test_name, test_status_bitmap) + if not filename or not file_contents: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_metadata_builder_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_metadata_builder_unittest.py +index ef67c37c5..12e757037 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_metadata_builder_unittest.py ++++ 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/w3c/wpt_metadata_builder_unittest.py +@@ -131,7 +131,7 @@ class WPTMetadataBuilderTest(unittest.TestCase): + test_names = metadata_builder.get_tests_needing_metadata() + # The test will appear in the result but won't have a SKIP status + found = False +- for name_item, status_item in test_names.items(): ++ for name_item, status_item in list(test_names.items()): + if name_item == test_name: + found = True + self.assertNotEqual(SKIP_TEST, status_item) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/bisect_test_ordering.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/bisect_test_ordering.py +index c60f62481..eb44c75ab 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/bisect_test_ordering.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/bisect_test_ordering.py +@@ -57,7 +57,7 @@ class Bisector(object): + def bisect(self): + if self.test_fails_in_isolation(): + self.buckets = [Bucket([self.expected_failure])] +- print '%s fails when run in isolation.' % self.expected_failure ++ print('%s fails when run in isolation.' % self.expected_failure) + self.print_result() + return 0 + if not self.test_fails(self.tests): +@@ -81,26 +81,26 @@ class Bisector(object): + return self.test_bucket_list_fails([Bucket([self.expected_failure])]) + + def verify_non_flaky(self): +- print 'Verifying the failure is not flaky by running 10 times.' 
++ print('Verifying the failure is not flaky by running 10 times.') + count_failures = 0 + for _ in range(0, 10): + if self.test_bucket_list_fails(self.buckets): + count_failures += 1 +- print 'Failed %d/10 times' % count_failures ++ print('Failed %d/10 times' % count_failures) + + def print_progress(self): + count = 0 + for bucket in self.buckets: + count += len(bucket.tests) +- print '%d tests left, %d buckets' % (count, len(self.buckets)) ++ print('%d tests left, %d buckets' % (count, len(self.buckets))) + + def print_result(self): + tests = [] + for bucket in self.buckets: + tests += bucket.tests + extra_args = ' --debug' if self.is_debug else '' +- print 'run_web_tests.py%s --jobs=1 --order=none %s' % (extra_args, +- ' '.join(tests)) ++ print('run_web_tests.py%s --jobs=1 --order=none %s' % (extra_args, ++ ' '.join(tests))) + + def is_done(self): + for bucket in self.buckets: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/breakpad/dump_reader_multipart.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/breakpad/dump_reader_multipart.py +index 79f415a76..808a1a325 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/breakpad/dump_reader_multipart.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/breakpad/dump_reader_multipart.py +@@ -29,7 +29,7 @@ + import cgi + import logging + import threading +-import Queue ++import queue + + from blinkpy.common.path_finder import PathFinder + from blinkpy.web_tests.breakpad.dump_reader import DumpReader +@@ -139,7 +139,7 @@ class DumpReaderMultipart(DumpReader): + self._generated_symbols = True + + _log.debug('Generating breakpad symbols') +- queue = Queue.Queue() ++ queue = queue.Queue() + thread = threading.Thread(target=_symbolize_keepalive, args=(queue, )) + thread.start() + try: +@@ -175,7 +175,7 @@ def _symbolize_keepalive(queue): + try: + queue.get(block=True, timeout=60) + return +- except Queue.Empty: ++ except 
queue.Empty: + pass + + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/builder_list.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/builder_list.py +index 76b11a3bc..12af600d4 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/builder_list.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/builder_list.py +@@ -100,7 +100,7 @@ class BuilderList(object): + return sorted(builders) + + def all_port_names(self): +- return sorted({b['port_name'] for b in self._builders.values()}) ++ return sorted({b['port_name'] for b in list(self._builders.values())}) + + def bucket_for_builder(self, builder_name): + return self._builders[builder_name].get('bucket', '') +@@ -131,7 +131,7 @@ class BuilderList(object): + to non-debug builders. If no builder is found, None is returned. + """ + debug_builder_name = None +- for builder_name, builder_info in self._builders.iteritems(): ++ for builder_name, builder_info in self._builders.items(): + if builder_info.get('is_try_builder'): + continue + if builder_info['port_name'] == target_port_name: +@@ -148,7 +148,7 @@ class BuilderList(object): + the version specifier for the first builder that matches, even + if it's a try bot builder. 
+ """ +- for _, builder_info in sorted(self._builders.iteritems()): ++ for _, builder_info in sorted(self._builders.items()): + if builder_info['port_name'] == target_port_name: + return builder_info['specifiers'][0] + return None +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/manager.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/manager.py +index 603fb672c..6c1026649 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/manager.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/manager.py +@@ -269,7 +269,7 @@ class Manager(object): + tests_to_retry = self._tests_to_retry(initial_results) + all_retry_results = [] + if should_retry_failures and tests_to_retry: +- for retry_attempt in xrange(1, self._options.num_retries + 1): ++ for retry_attempt in range(1, self._options.num_retries + 1): + if not tests_to_retry: + break + +@@ -450,9 +450,9 @@ class Manager(object): + retry_attempt=0): + + test_inputs = [] +- for _ in xrange(iterations): ++ for _ in range(iterations): + for test in tests_to_run: +- for _ in xrange(repeat_each): ++ for _ in range(repeat_each): + test_inputs.append( + self._test_input_for_file(test, retry_attempt)) + return self._runner.run_tests(self._expectations, test_inputs, +@@ -519,7 +519,7 @@ class Manager(object): + test_failures.AbstractTestResultType.result_directory = self._results_directory + test_failures.AbstractTestResultType.filesystem = self._filesystem + +- for test, result in run_results.unexpected_results_by_name.iteritems(): ++ for test, result in run_results.unexpected_results_by_name.items(): + if result.type != ResultType.Crash: + continue + for failure in result.failures: +@@ -532,7 +532,7 @@ class Manager(object): + + sample_files = self._port.look_for_new_samples(crashed_processes, + start_time) or {} +- for test, sample_file in sample_files.iteritems(): ++ for test, sample_file 
in sample_files.items(): + test_failures.AbstractTestResultType.test_name = test + test_result = run_results.unexpected_results_by_name[test] + artifact_relative_path = self._port.output_filename( +@@ -551,7 +551,7 @@ class Manager(object): + + new_crash_logs = self._port.look_for_new_crash_logs( + crashed_processes, start_time) or {} +- for test, (crash_log, crash_site) in new_crash_logs.iteritems(): ++ for test, (crash_log, crash_site) in new_crash_logs.items(): + test_failures.AbstractTestResultType.test_name = test + failure.crash_log = crash_log + failure.has_log = self._port.output_contains_sanitizer_messages( +@@ -586,7 +586,7 @@ class Manager(object): + # only consider the last retry attempt for the count of unexpected regressions. + return [ + result.test_name +- for result in run_results.unexpected_results_by_name.values() ++ for result in list(run_results.unexpected_results_by_name.values()) + if result.type != ResultType.Pass + ] + +@@ -597,7 +597,7 @@ class Manager(object): + + # FIXME: Upload stats.json to the server and delete times_ms. 
+ times_trie = json_results_generator.test_timings_trie( +- initial_results.results_by_name.values()) ++ list(initial_results.results_by_name.values())) + times_json_path = self._filesystem.join(self._artifacts_directory, + 'times_ms.json') + json_results_generator.write_json(self._filesystem, times_trie, +@@ -710,7 +710,7 @@ class Manager(object): + return int(worker_name.split('/')[1]) if worker_name else -1 + + stats = {} +- for result in initial_results.results_by_name.values(): ++ for result in list(initial_results.results_by_name.values()): + if result.type != ResultType.Skip: + stats[result.test_name] = { + 'results': (_worker_number(result.worker_name), +@@ -719,6 +719,6 @@ class Manager(object): + int(result.total_run_time * 1000)) + } + stats_trie = {} +- for name, value in stats.iteritems(): ++ for name, value in stats.items(): + json_results_generator.add_path_to_trie(name, value, stats_trie) + return stats_trie +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/repaint_overlay_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/repaint_overlay_unittest.py +index bafa13995..5d16fa0df 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/repaint_overlay_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/repaint_overlay_unittest.py +@@ -35,7 +35,7 @@ class TestRepaintOverlay(unittest.TestCase): + self.assertFalse(repaint_overlay.result_contains_repaint_rects('ABCD')) + + def test_extract_layer_tree(self): +- self.assertEquals(LAYER_TREE, ++ self.assertEqual(LAYER_TREE, + repaint_overlay.extract_layer_tree(LAYER_TREE)) + + def test_generate_repaint_overlay_html(self): +@@ -67,7 +67,7 @@ class TestRepaintOverlay(unittest.TestCase): + 'paint/invalidation/repaint-overlay/layers-overlay.html') + expected = host.filesystem.read_text_file(overlay_html_file) + +- self.assertEquals( ++ self.assertEqual( + 
expected, overlay_html, + 'This failure is probably caused by changed repaint_overlay.py. ' + 'Please examine the diffs:\n diff %s %s\n' +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/test_result_sink.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/test_result_sink.py +index 0b00a7767..c903438e0 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/test_result_sink.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/test_result_sink.py +@@ -15,7 +15,7 @@ section. + + import json + import logging +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + + from blinkpy.web_tests.models.typ_types import ResultType + +@@ -76,7 +76,7 @@ class TestResultSink(object): + self._sink_ctx['address']) + + def _send(self, data): +- req = urllib2.Request( ++ req = urllib.request.Request( + url=self._sink_url, + data=json.dumps(data), + headers={ +@@ -86,7 +86,7 @@ class TestResultSink(object): + 'ResultSink %s' % self._sink_ctx['auth_token'], + }, + ) +- return urllib2.urlopen(req) ++ return urllib.request.urlopen(req) + + def _status(self, result): + """Returns the TestStatus enum value corresponding to the result type. 
+@@ -130,7 +130,7 @@ class TestResultSink(object): + """ + ret = {} + base_dir = self._port.results_directory() +- for name, paths in result.artifacts.artifacts.iteritems(): ++ for name, paths in result.artifacts.artifacts.items(): + for p in paths: + art_id = name + i = 1 +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/test_result_sink_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/test_result_sink_unittest.py +index 74e5a8d87..f08fac095 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/test_result_sink_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/test_result_sink_unittest.py +@@ -7,7 +7,7 @@ import json + import mock + import sys + import unittest +-from urlparse import urlparse ++from urllib.parse import urlparse + + from blinkpy.common.host_mock import MockHost + from blinkpy.web_tests.controllers.test_result_sink import CreateTestResultSink +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/web_test_finder.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/web_test_finder.py +index 11ad6e51e..1593d6cc6 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/web_test_finder.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/web_test_finder.py +@@ -113,7 +113,7 @@ class WebTestFinder(object): + + # Ignore tests with a time==0 because those are skipped tests. 
+ sorted_times = sorted( +- [test for (test, time) in times.iteritems() if time], ++ [test for (test, time) in times.items() if time], + key=lambda t: (times[t], t)) + clamped_percentile = max(0, min(100, fastest_percentile)) + number_of_tests_to_return = int( +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/web_test_finder_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/web_test_finder_unittest.py +index cbf48a7d6..00fcc612a 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/web_test_finder_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/web_test_finder_unittest.py +@@ -144,9 +144,9 @@ class WebTestFinderTests(unittest.TestCase): + self.assertEqual(tests, set([idlharness_test_1, idlharness_test_2])) + self.assertTrue( + expectations.get_expectations(non_idlharness_test).is_default_pass) +- self.assertEquals( ++ self.assertEqual( + expectations.get_expectations(idlharness_test_1).results, {'SKIP'}) +- self.assertEquals( ++ self.assertEqual( + expectations.get_expectations(idlharness_test_2).results, {'SKIP'}) + + # Disable expectations entirely; we should still skip the idlharness +@@ -165,9 +165,9 @@ class WebTestFinderTests(unittest.TestCase): + # TestExpectations work. 
+ self.assertTrue( + expectations.get_expectations(non_idlharness_test).is_default_pass) +- self.assertEquals( ++ self.assertEqual( + expectations.get_expectations(idlharness_test_1).results, {'SKIP'}) +- self.assertEquals( ++ self.assertEqual( + expectations.get_expectations(idlharness_test_2).results, {'SKIP'}) + + def test_find_fastest_tests(self): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/web_test_runner.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/web_test_runner.py +index b76bc91f7..a7dc54894 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/web_test_runner.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/controllers/web_test_runner.py +@@ -177,7 +177,7 @@ class WebTestRunner(object): + if args not in tests_by_args: + tests_by_args[args] = [] + tests_by_args[args].append(test_input) +- shard.test_inputs = list(itertools.chain(*tests_by_args.values())) ++ shard.test_inputs = list(itertools.chain(*list(tests_by_args.values()))) + + def _worker_factory(self, worker_connection): + return Worker(worker_connection, self._results_directory, +@@ -511,7 +511,7 @@ class Sharder(object): + tests_by_dir.setdefault(directory, []) + tests_by_dir[directory].append(test_input) + +- for directory, test_inputs in tests_by_dir.iteritems(): ++ for directory, test_inputs in tests_by_dir.items(): + shard = TestShard(directory, test_inputs) + if test_inputs[0].requires_lock: + locked_shards.append(shard) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/layout_package/bot_test_expectations.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/layout_package/bot_test_expectations.py +index 1bd7b3288..c89c699ae 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/layout_package/bot_test_expectations.py ++++ 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/layout_package/bot_test_expectations.py +@@ -32,8 +32,8 @@ corresponding to the give port. + import json + import logging + import os.path +-import urllib +-import urllib2 ++import urllib.request, urllib.parse, urllib.error ++import urllib.request, urllib.error, urllib.parse + + from blinkpy.web_tests.models.typ_types import Expectation, ResultType + +@@ -112,7 +112,7 @@ class ResultsJSON(object): + self._json = json_dict + + def _walk_trie(self, trie, parent_path): +- for name, value in trie.items(): ++ for name, value in list(trie.items()): + full_path = os.path.join(parent_path, name) + + # FIXME: If we ever have a test directory self.RESULTS_KEY +@@ -162,9 +162,9 @@ class BotTestExpectationsFactory(object): + def _results_url_for_builder(self, builder, use_try_step=False): + test_type = (self.STEP_NAME_TRY if use_try_step else self.STEP_NAME) + return self.RESULTS_URL_FORMAT % ( +- urllib.quote(test_type), +- urllib.quote(self.builders.master_for_builder(builder)), +- urllib.quote(builder)) ++ urllib.parse.quote(test_type), ++ urllib.parse.quote(self.builders.master_for_builder(builder)), ++ urllib.parse.quote(builder)) + + def _results_json_for_builder(self, builder): + results_url = self._results_url_for_builder( +@@ -173,8 +173,8 @@ class BotTestExpectationsFactory(object): + _log.debug('Fetching flakiness data from appengine: %s', + results_url) + return ResultsJSON(builder, json.load( +- urllib2.urlopen(results_url))) +- except urllib2.URLError as error: ++ urllib.request.urlopen(results_url))) ++ except urllib.error.URLError as error: + _log.warning( + 'Could not retrieve flakiness data from the bot. 
url: %s', + results_url) +@@ -186,8 +186,8 @@ class BotTestExpectationsFactory(object): + _log.debug('Fetching flakiness data from appengine: %s', + results_url) + return ResultsFilter(builder, json.load( +- urllib2.urlopen(results_url))) +- except urllib2.URLError as error: ++ urllib.request.urlopen(results_url))) ++ except urllib.error.URLError as error: + _log.warning( + 'Could not retrieve flakiness data from the bot. url: %s', + results_url) +@@ -256,7 +256,7 @@ class BotTestExpectations(object): + result_types = self._all_types_in_results(results_dict) + + # Distinct results as non-encoded strings. +- results = map(self.results_json.expectation_for_type, result_types) ++ results = list(map(self.results_json.expectation_for_type, result_types)) + + # Get test expectations + expectations = exp_string.split(' ') +@@ -300,8 +300,8 @@ class BotTestExpectations(object): + continue + + # Distinct results as non-encoded strings. +- result_strings = map(self.results_json.expectation_for_type, +- result_types) ++ result_strings = list(map(self.results_json.expectation_for_type, ++ result_types)) + + results_by_path[test_path] = sorted(result_strings) + return results_by_path +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/layout_package/json_results_generator.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/layout_package/json_results_generator.py +index ff5f59476..5299198ec 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/layout_package/json_results_generator.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/layout_package/json_results_generator.py +@@ -65,7 +65,7 @@ def write_json(filesystem, json_object, file_path, callback=None): + def convert_times_trie_to_flat_paths(trie, prefix=None): + """Converts the directory structure in the given trie to flat paths, prepending a prefix to each.""" + result = {} +- for name, data in trie.iteritems(): ++ for name, data in 
trie.items(): + if prefix: + name = prefix + "/" + name + if isinstance(data, int): +@@ -115,7 +115,7 @@ class TestResult(object): + """A simple class that represents a single test result.""" + + # Test modifier constants. +- (NONE, FAILS, FLAKY, DISABLED) = range(4) ++ (NONE, FAILS, FLAKY, DISABLED) = list(range(4)) + + def __init__(self, test, failed=False, elapsed_time=0): + self.test_name = test +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/lint_test_expectations.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/lint_test_expectations.py +index f309d4d6c..0e8660c4e 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/lint_test_expectations.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/lint_test_expectations.py +@@ -41,6 +41,7 @@ from blinkpy.web_tests.port.android import ( + PRODUCTS_TO_EXPECTATION_FILE_PATHS, ANDROID_DISABLED_TESTS, + ANDROID_WEBLAYER) + from blinkpy.web_tests.port.factory import platform_options ++from functools import reduce + + _log = logging.getLogger(__name__) + +@@ -51,7 +52,7 @@ def lint(host, options): + + # Add all extra expectation files to be linted. 
+ options.additional_expectations.extend( +- PRODUCTS_TO_EXPECTATION_FILE_PATHS.values() + [ANDROID_DISABLED_TESTS] + [ ++ list(PRODUCTS_TO_EXPECTATION_FILE_PATHS.values()) + [ANDROID_DISABLED_TESTS] + [ + host.filesystem.join(port.web_tests_dir(), 'WPTOverrideExpectations'), + host.filesystem.join(port.web_tests_dir(), 'WebGPUExpectations'), + ]) +@@ -82,16 +83,16 @@ def lint(host, options): + if config_macro_dict: + all_system_specifiers.update( + {s.lower() +- for s in config_macro_dict.keys()}) ++ for s in list(config_macro_dict.keys())}) + all_system_specifiers.update({ + s.lower() +- for s in reduce(lambda x, y: x + y, config_macro_dict.values()) ++ for s in reduce(lambda x, y: x + y, list(config_macro_dict.values())) + }) + for path in port.extra_expectations_files(): + if host.filesystem.exists(path): + expectations_dict[path] = host.filesystem.read_text_file(path) + +- for path, content in expectations_dict.items(): ++ for path, content in list(expectations_dict.items()): + # Check the expectations file content + failures.extend(_check_expectations_file_content(content)) + +@@ -153,7 +154,7 @@ def _check_expectations_file_content(content): + def _check_test_existence(host, port, path, expectations, wpt_tests): + failures = [] + warnings = [] +- is_android_path = path in PRODUCTS_TO_EXPECTATION_FILE_PATHS.values() ++ is_android_path = path in list(PRODUCTS_TO_EXPECTATION_FILE_PATHS.values()) + for exp in expectations: + if not exp.test: + continue +@@ -308,7 +309,7 @@ def _check_expectations(host, port, path, test_expectations, options, wpt_tests) + host, port, path, expectations, wpt_tests) + failures.extend(_check_directory_glob(host, port, path, expectations)) + failures.extend(_check_never_fix_tests(host, port, path, expectations)) +- if path in PRODUCTS_TO_EXPECTATION_FILE_PATHS.values(): ++ if path in list(PRODUCTS_TO_EXPECTATION_FILE_PATHS.values()): + failures.extend(_check_non_wpt_in_android_override( + host, port, path, expectations)) + # 
TODO(crbug.com/1080691): Change this to failures once +@@ -490,7 +491,7 @@ def main(argv, stderr, host=None): + except KeyboardInterrupt: + exit_status = exit_codes.INTERRUPTED_EXIT_STATUS + except Exception as error: # pylint: disable=broad-except +- print >> stderr, '\n%s raised: %s' % (error.__class__.__name__, error) ++ print('\n%s raised: %s' % (error.__class__.__name__, error), file=stderr) + traceback.print_exc(file=stderr) + exit_status = exit_codes.EXCEPTIONAL_EXIT_STATUS + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/lint_test_expectations_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/lint_test_expectations_unittest.py +index bdfb015bc..84c0cbb13 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/lint_test_expectations_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/lint_test_expectations_unittest.py +@@ -26,7 +26,7 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +-import StringIO ++import io + import optparse + import unittest + +@@ -274,7 +274,7 @@ class LintTest(LoggingTestCase): + self.assertTrue(failures) + self.assertEqual(warnings, []) + +- self.assertEquals(len(failures), 6) ++ self.assertEqual(len(failures), 6) + expected_non_existence = [ + 'test1/*', + 'test2/bar.html', +@@ -298,7 +298,7 @@ class LintTest(LoggingTestCase): + raw_expectations = ('# results: [ Failure ]\n' + 'external/wpt/test.html [ Failure ]\n' + 'non-wpt/test.html [ Failure ]\n') +- for path in PRODUCTS_TO_EXPECTATION_FILE_PATHS.values(): ++ for path in list(PRODUCTS_TO_EXPECTATION_FILE_PATHS.values()): + host.filesystem.write_text_file(path, raw_expectations) + host.port_factory.get = lambda platform, options=None: port + host.port_factory.all_port_names = lambda platform=None: [port.name()] +@@ -370,8 +370,8 @@ class LintTest(LoggingTestCase): + failures, warnings = lint_test_expectations.lint(host, options) + self.assertEqual(failures, []) + +- self.assertEquals(len(warnings), 1) +- self.assertRegexpMatches(warnings[0], ':5 .*redundant with.* line 4$') ++ self.assertEqual(len(warnings), 1) ++ self.assertRegex(warnings[0], ':5 .*redundant with.* line 4$') + + def test_never_fix_tests(self): + options = optparse.Values({ +@@ -405,11 +405,11 @@ class LintTest(LoggingTestCase): + failures, warnings = lint_test_expectations.lint(host, options) + self.assertEqual(warnings, []) + +- self.assertEquals(len(failures), 4) +- self.assertRegexpMatches(failures[0], ':7 .*must override') +- self.assertRegexpMatches(failures[1], ':8 .*must override') +- self.assertRegexpMatches(failures[2], ':9 Only one of') +- self.assertRegexpMatches(failures[3], ':11 .*must override') ++ self.assertEqual(len(failures), 4) ++ self.assertRegex(failures[0], ':7 .*must override') ++ self.assertRegex(failures[1], ':8 .*must override') ++ self.assertRegex(failures[2], ':9 Only one of') ++ self.assertRegex(failures[3], ':11 .*must override') + + + class 
CheckVirtualSuiteTest(unittest.TestCase): +@@ -481,7 +481,7 @@ class MainTest(unittest.TestCase): + self.orig_lint_fn = lint_test_expectations.lint + self.orig_check_fn = lint_test_expectations.check_virtual_test_suites + lint_test_expectations.check_virtual_test_suites = lambda host, options: [] +- self.stderr = StringIO.StringIO() ++ self.stderr = io.StringIO() + + def tearDown(self): + lint_test_expectations.lint = self.orig_lint_fn +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/merge_results.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/merge_results.py +index 3b220f19f..fc908840a 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/merge_results.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/merge_results.py +@@ -161,8 +161,8 @@ class JSONMerger(Merger): + Merger.__init__(self) + + self.add_helper( +- TypeMatch(types.ListType, types.TupleType), self.merge_listlike) +- self.add_helper(TypeMatch(types.DictType), self.merge_dictlike) ++ TypeMatch(list, tuple), self.merge_listlike) ++ self.add_helper(TypeMatch(dict), self.merge_dictlike) + + def fallback_matcher(self, objs, name=None): + raise MergeFailure("No merge helper found!", name, objs) +@@ -210,7 +210,7 @@ class JSONMerger(Merger): + dict_mid.setdefault(key, []).append(dobj[key]) + + dict_out = dicts[0].__class__({}) +- for k, v in dict_mid.iteritems(): ++ for k, v in dict_mid.items(): + assert v + if len(v) == 1: + dict_out[k] = v[0] +@@ -492,7 +492,7 @@ class DirMerger(Merger): + + # Go through each file and try to merge it. + # partial_file_path is the file relative to the directories. 
+- for partial_file_path, in_dirs in sorted(files.iteritems()): ++ for partial_file_path, in_dirs in sorted(files.items()): + out_path = self.filesystem.join(output_dir, partial_file_path) + if self.filesystem.exists(out_path): + raise MergeFailure('File %s already exist in output.', +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/merge_results_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/merge_results_unittest.py +index b47360db8..70d9d5cee 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/merge_results_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/merge_results_unittest.py +@@ -9,7 +9,7 @@ + import types + import unittest + +-import cStringIO as StringIO ++import io as StringIO + + from collections import OrderedDict + +@@ -19,13 +19,13 @@ from blinkpy.web_tests import merge_results + + class JSONMergerTests(unittest.TestCase): + def test_type_match(self): +- self.assertTrue(merge_results.TypeMatch(types.DictType)(dict())) ++ self.assertTrue(merge_results.TypeMatch(dict)(dict())) + self.assertFalse( +- merge_results.TypeMatch(types.ListType, types.TupleType)(dict())) ++ merge_results.TypeMatch(list, tuple)(dict())) + self.assertTrue( +- merge_results.TypeMatch(types.ListType, types.TupleType)(list())) ++ merge_results.TypeMatch(list, tuple)(list())) + self.assertTrue( +- merge_results.TypeMatch(types.ListType, types.TupleType)(tuple())) ++ merge_results.TypeMatch(list, tuple)(tuple())) + + def test_merge_listlike(self): + m = merge_results.JSONMerger() +@@ -45,10 +45,10 @@ class JSONMergerTests(unittest.TestCase): + self.assertListEqual(expected, m.merge([inputa, inputb])) + self.assertSequenceEqual( + expected, m.merge_listlike([tuple(inputa), +- tuple(inputb)]), types.TupleType) ++ tuple(inputb)]), tuple) + self.assertSequenceEqual(expected, + m.merge([tuple(inputa), +- tuple(inputb)]), types.TupleType) ++ tuple(inputb)]), tuple) + + 
def test_merge_simple_dict(self): + m = merge_results.JSONMerger() +@@ -445,11 +445,11 @@ class JSONMergerTests(unittest.TestCase): + b_before_a['a'] = 1 + + r1 = m.merge([a, b]) +- self.assertSequenceEqual(a_before_b.items(), r1.items()) ++ self.assertSequenceEqual(list(a_before_b.items()), list(r1.items())) + self.assertIsInstance(r1, OrderedDict) + + r2 = m.merge([b, a]) +- self.assertSequenceEqual(b_before_a.items(), r2.items()) ++ self.assertSequenceEqual(list(b_before_a.items()), list(r2.items())) + self.assertIsInstance(r2, OrderedDict) + + def test_custom_match_on_name(self): +@@ -1477,7 +1477,7 @@ ADD_RESULTS({ + fs, results_json_value_overrides={'layout_tests_dir': 'src'}) + merger.merge('/out', ['/shards/0', '/shards/1']) + +- for fname, contents in self.web_test_output_filesystem.items(): ++ for fname, contents in list(self.web_test_output_filesystem.items()): + self.assertIn(fname, fs.files) + self.assertMultiLineEqual(contents, fs.files[fname]) + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_configuration.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_configuration.py +index 9fbc21c5b..29c8a0e97 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_configuration.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_configuration.py +@@ -43,10 +43,10 @@ class TestConfiguration(object): + return ['version', 'architecture', 'build_type'] + + def items(self): +- return self.__dict__.items() ++ return list(self.__dict__.items()) + + def keys(self): +- return self.__dict__.keys() ++ return list(self.__dict__.keys()) + + def __str__(self): + return ( +@@ -63,7 +63,7 @@ class TestConfiguration(object): + + def values(self): + """Returns the configuration values of this instance as a tuple.""" +- return self.__dict__.values() ++ return list(self.__dict__.values()) + + + class SpecifierSorter(object): +@@ -73,7 +73,7 
@@ class SpecifierSorter(object): + if not all_test_configurations: + return + for test_configuration in all_test_configurations: +- for category, specifier in test_configuration.items(): ++ for category, specifier in list(test_configuration.items()): + self.add_specifier(category, specifier) + + self.add_macros(macros) +@@ -85,7 +85,7 @@ class SpecifierSorter(object): + if not macros: + return + # Assume well-formed macros. +- for macro, specifier_list in macros.items(): ++ for macro, specifier_list in list(macros.items()): + self.add_specifier( + self.category_for_specifier(specifier_list[0]), macro) + +@@ -100,7 +100,7 @@ class SpecifierSorter(object): + return self._specifier_to_category.get(specifier) + + def sort_specifiers(self, specifiers): +- category_slots = map(lambda x: [], TestConfiguration.category_order()) ++ category_slots = [[] for x in TestConfiguration.category_order()] + for specifier in specifiers: + category_slots[self.specifier_priority(specifier)].append( + specifier) +@@ -123,24 +123,24 @@ class TestConfigurationConverter(object): + self._collapsing_sets_by_category = {} + matching_sets_by_category = {} + for configuration in all_test_configurations: +- for category, specifier in configuration.items(): ++ for category, specifier in list(configuration.items()): + self._specifier_to_configuration_set.setdefault( + specifier, set()).add(configuration) + self._specifier_sorter.add_specifier(category, specifier) + self._collapsing_sets_by_category.setdefault( + category, set()).add(specifier) + # FIXME: This seems extra-awful. 
+- for cat2, spec2 in configuration.items(): ++ for cat2, spec2 in list(configuration.items()): + if category == cat2: + continue + matching_sets_by_category.setdefault( + specifier, {}).setdefault(cat2, set()).add(spec2) +- for collapsing_set in self._collapsing_sets_by_category.values(): ++ for collapsing_set in list(self._collapsing_sets_by_category.values()): + self._collapsing_sets_by_size.setdefault( + len(collapsing_set), set()).add(frozenset(collapsing_set)) + +- for specifier, sets_by_category in matching_sets_by_category.items(): +- for category, set_by_category in sets_by_category.items(): ++ for specifier, sets_by_category in list(matching_sets_by_category.items()): ++ for category, set_by_category in list(sets_by_category.items()): + if (len(set_by_category) == 1 and + self._specifier_sorter.category_priority(category) > + self._specifier_sorter.specifier_priority(specifier)): +@@ -177,11 +177,11 @@ class TestConfigurationConverter(object): + matching_sets.setdefault(category, + set()).update(configurations) + +- return reduce(set.intersection, matching_sets.values()) ++ return reduce(set.intersection, list(matching_sets.values())) + + @classmethod + def collapse_macros(cls, macros_dict, specifiers_list): +- for macro_specifier, macro in macros_dict.items(): ++ for macro_specifier, macro in list(macros_dict.items()): + if len(macro) == 1: + continue + +@@ -209,7 +209,7 @@ class TestConfigurationConverter(object): + for specifier in specifiers_to_add: + specifiers_list.append(specifier) + +- for macro_specifier, macro in macros_dict.items(): ++ for macro_specifier, macro in list(macros_dict.items()): + collapse_individual_specifier_set(macro_specifier, macro) + + @classmethod +@@ -237,7 +237,7 @@ class TestConfigurationConverter(object): + for config in test_configuration_set: + values = set(config.values()) + for specifier, junk_specifier_set in \ +- self._junk_specifier_combinations.items(): ++ list(self._junk_specifier_combinations.items()): + if 
specifier in values: + values -= junk_specifier_set + specifiers_list.append(frozenset(values)) +@@ -256,7 +256,7 @@ class TestConfigurationConverter(object): + + # 2) Collapse specifier sets with common specifiers: + # (win7, release), (win7, debug) --> (win7, x86) +- for size, collapsing_sets in self._collapsing_sets_by_size.items(): ++ for size, collapsing_sets in list(self._collapsing_sets_by_size.items()): + while try_collapsing(size, collapsing_sets): + pass + +@@ -276,7 +276,7 @@ class TestConfigurationConverter(object): + + # 3) Abbreviate specifier sets by combining specifiers across categories. + # (win7, release), (win10, release) --> (win7, win10, release) +- while try_abbreviating(self._collapsing_sets_by_size.values()): ++ while try_abbreviating(list(self._collapsing_sets_by_size.values())): + pass + + # 4) Substitute specifier subsets that match macros within each set: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_configuration_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_configuration_unittest.py +index b1f0dab85..1f9b8cb3f 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_configuration_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_configuration_unittest.py +@@ -53,7 +53,7 @@ class TestConfigurationTest(unittest.TestCase): + def test_items(self): + config = TestConfiguration('win7', 'x86', 'release') + result_config_dict = {} +- for category, specifier in config.items(): ++ for category, specifier in list(config.items()): + result_config_dict[category] = specifier + self.assertEqual({ + 'version': 'win7', +@@ -64,7 +64,7 @@ class TestConfigurationTest(unittest.TestCase): + def test_keys(self): + config = TestConfiguration('win7', 'x86', 'release') + result_config_keys = [] +- for category in config.keys(): ++ for category in list(config.keys()): + 
result_config_keys.append(category) + self.assertEqual( + set(['version', 'architecture', 'build_type']), +@@ -107,14 +107,14 @@ class TestConfigurationTest(unittest.TestCase): + self.assertEqual( + TestConfiguration('win7', 'x86', 'release'), + TestConfiguration('win7', 'x86', 'release')) +- self.assertNotEquals( ++ self.assertNotEqual( + TestConfiguration('win7', 'x86', 'release'), + TestConfiguration('win7', 'x86', 'debug')) + + def test_values(self): + config = TestConfiguration('win7', 'x86', 'release') + result_config_values = [] +- for value in config.values(): ++ for value in list(config.values()): + result_config_values.append(value) + self.assertEqual( + set(['win7', 'x86', 'release']), set(result_config_values)) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_expectations.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_expectations.py +index 475b86c49..587f02ec5 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_expectations.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_expectations.py +@@ -36,6 +36,7 @@ from collections import OrderedDict + + from blinkpy.common.memoized import memoized + from blinkpy.web_tests.models import typ_types ++from functools import reduce + + ResultType = typ_types.ResultType + +@@ -106,7 +107,7 @@ class TestExpectations(object): + # map file paths to sets of line numbers + self._expectation_file_linenos = defaultdict(set) + +- for path, content in self._expectations_dict.items(): ++ for path, content in list(self._expectations_dict.items()): + test_expectations = typ_types.TestExpectations( + tags=self._system_condition_tags) + ret, errors = test_expectations.parse_tagged_list( +@@ -184,7 +185,7 @@ class TestExpectations(object): + args: + path: Absolute path of expectations file.""" + content = self._expectations_dict[path] +- idx = 
self._expectations_dict.keys().index(path) ++ idx = list(self._expectations_dict.keys()).index(path) + typ_expectations = self._expectations[idx] + lines = [] + +@@ -231,7 +232,7 @@ class TestExpectations(object): + lines.append(_NotExpectation('', len(content_lines) + 1)) + + for line in sorted( +- reduce(lambda x,y: x+y, lineno_to_exps.values()), ++ reduce(lambda x,y: x+y, list(lineno_to_exps.values())), + key=lambda e: e.test): + if line.lineno: + raise ValueError( +@@ -281,7 +282,7 @@ class TestExpectations(object): + def _os_to_version(self): + os_to_version = {} + for os, os_versions in \ +- self._port.configuration_specifier_macros().items(): ++ list(self._port.configuration_specifier_macros().items()): + for version in os_versions: + os_to_version[version.lower()] = os.lower() + return os_to_version +@@ -332,7 +333,7 @@ class TestExpectations(object): + trailing_comments=trailing_comments) + + def get_expectations_from_file(self, path, test_name): +- idx = self._expectations_dict.keys().index(path) ++ idx = list(self._expectations_dict.keys()).index(path) + return copy.deepcopy( + self._expectations[idx].individual_exps.get(test_name) or []) + +@@ -386,7 +387,7 @@ class TestExpectations(object): + for test_exp in self._expectations: + tests.extend(test_exp.individual_exps) + tests.extend([ +- dir_name[:-1] for dir_name in test_exp.glob_exps.keys() ++ dir_name[:-1] for dir_name in list(test_exp.glob_exps.keys()) + if self.port.test_isdir(dir_name[:-1]) + ]) + return { +@@ -408,7 +409,7 @@ class TestExpectations(object): + if bot_expectations: + raw_expectations = ( + '# results: [ Failure Pass Crash Skip Timeout ]\n') +- for test, results in bot_expectations.items(): ++ for test, results in list(bot_expectations.items()): + raw_expectations += typ_types.Expectation( + test=test, results=results).to_string() + '\n' + self.merge_raw_expectations(raw_expectations) +@@ -422,7 +423,7 @@ class TestExpectations(object): + path: Absolute path of file where the 
Expectation instances + came from. + exps: List of Expectation instances to be deleted.""" +- idx = self._expectations_dict.keys().index(path) ++ idx = list(self._expectations_dict.keys()).index(path) + typ_expectations = self._expectations[idx] + + for exp in exps: +@@ -445,7 +446,7 @@ class TestExpectations(object): + exps: List of Expectation instances to be added to the file. + lineno: Line number in expectations file where the expectations will + be added.""" +- idx = self._expectations_dict.keys().index(path) ++ idx = list(self._expectations_dict.keys()).index(path) + typ_expectations = self._expectations[idx] + added_glob = False + +@@ -470,7 +471,7 @@ class TestExpectations(object): + + if added_glob: + glob_exps = reduce(lambda x, y: x + y, +- typ_expectations.glob_exps.values()) ++ list(typ_expectations.glob_exps.values())) + glob_exps.sort(key=lambda e: len(e.test), reverse=True) + typ_expectations.glob_exps = OrderedDict() + for exp in glob_exps: +@@ -487,16 +488,16 @@ class SystemConfigurationRemover(object): + def __init__(self, test_expectations): + self._test_expectations = test_expectations + self._configuration_specifiers_dict = {} +- for os, os_versions in (self._test_expectations.port. +- configuration_specifier_macros().items()): ++ for os, os_versions in (list(self._test_expectations.port. ++ configuration_specifier_macros().items())): + self._configuration_specifiers_dict[os.lower()] = (frozenset( + version.lower() for version in os_versions)) + self._os_specifiers = frozenset( +- os for os in self._configuration_specifiers_dict.keys()) ++ os for os in list(self._configuration_specifiers_dict.keys())) + self._version_specifiers = frozenset( + specifier.lower() for specifier in reduce( +- lambda x, y: x | y, self._configuration_specifiers_dict. +- values())) ++ lambda x, y: x | y, list(self._configuration_specifiers_dict. 
++ values()))) + self._deleted_lines = set() + self._generic_exp_file_path = \ + self._test_expectations.port.path_to_generic_test_expectations_file() +@@ -527,7 +528,7 @@ class SystemConfigurationRemover(object): + # expectation for each version that is not in the versions_to_remove list + system_specifiers = set(self._version_specifiers - + versions_to_remove) +- for os, os_versions in self._configuration_specifiers_dict.items(): ++ for os, os_versions in list(self._configuration_specifiers_dict.items()): + # If all the versions of an OS are in the system specifiers set, then + # replace all those specifiers with the OS specifier. + if os_versions.issubset(system_specifiers): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_expectations_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_expectations_unittest.py +index 17205daf3..73ebfc905 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_expectations_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_expectations_unittest.py +@@ -37,6 +37,7 @@ from blinkpy.web_tests.models.test_configuration import ( + from blinkpy.web_tests.models.test_expectations import ( + TestExpectations, SystemConfigurationRemover, ParseError) + from blinkpy.web_tests.models.typ_types import ResultType, Expectation ++from functools import reduce + + + class Base(unittest.TestCase): +@@ -464,7 +465,7 @@ class SystemConfigurationRemoverTests(Base): + self.set_up_using_raw_expectations(raw_expectations) + all_versions = reduce( + lambda x, y: x + y, +- self._port.configuration_specifier_macros_dict.values()) ++ list(self._port.configuration_specifier_macros_dict.values())) + self._system_config_remover.remove_os_versions( + 'failures/expected/text.html', all_versions) + self._system_config_remover.update_expectations() +@@ -484,7 +485,7 @@ class 
SystemConfigurationRemoverTests(Base): + self.set_up_using_raw_expectations(raw_expectations) + all_versions = reduce( + lambda x, y: x + y, +- self._port.configuration_specifier_macros_dict.values()) ++ list(self._port.configuration_specifier_macros_dict.values())) + self._system_config_remover.remove_os_versions( + 'failures/expected/text.html', all_versions) + self._system_config_remover.update_expectations() +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_failures.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_failures.py +index 251b15a76..499528f5a 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_failures.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_failures.py +@@ -26,7 +26,7 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-import cPickle ++import pickle + + from blinkpy.web_tests.controllers import repaint_overlay + from blinkpy.web_tests.models.typ_types import ResultType +@@ -155,7 +155,7 @@ class AbstractTestResultType(object): + @staticmethod + def loads(s): + """Creates a AbstractTestResultType object from the specified string.""" +- return cPickle.loads(s) ++ return pickle.loads(s) + + def message(self): + """Returns a string describing the failure in more detail.""" +@@ -172,7 +172,7 @@ class AbstractTestResultType(object): + + def dumps(self): + """Returns the string/JSON representation of a AbstractTestResultType.""" +- return cPickle.dumps(self) ++ return pickle.dumps(self) + + def driver_needs_restart(self): + """Returns True if we should kill the driver before the next test.""" +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_results.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_results.py +index 
0ddc277cc..6a265d78e 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_results.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_results.py +@@ -26,7 +26,7 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-import cPickle ++import pickle + + from blinkpy.web_tests.models import test_failures, test_expectations + from blinkpy.web_tests.models.typ_types import ResultType, Artifacts +@@ -66,7 +66,7 @@ class TestResult(object): + + @staticmethod + def loads(string): +- return cPickle.loads(string) ++ return pickle.loads(string) + + def __init__(self, + test_name, +@@ -136,4 +136,4 @@ class TestResult(object): + return not (self == other) + + def dumps(self): +- return cPickle.dumps(self) ++ return pickle.dumps(self) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_run_results.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_run_results.py +index 9a9b8827a..3362d6835 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_run_results.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_run_results.py +@@ -78,7 +78,7 @@ class TestRunResults(object): + + self.tests_by_expectation = {} + for expected_result in \ +- test_expectations.EXPECTATION_DESCRIPTIONS.keys(): ++ list(test_expectations.EXPECTATION_DESCRIPTIONS.keys()): + self.tests_by_expectation[expected_result] = set() + + self.slow_tests = set() +@@ -199,7 +199,7 @@ def summarize_results(port_obj, + merged_results_by_name = collections.defaultdict(list) + for test_run_results in [initial_results] + all_retry_results: + # all_results does not include SKIP, so we need results_by_name. 
+- for test_name, result in test_run_results.results_by_name.iteritems(): ++ for test_name, result in test_run_results.results_by_name.items(): + if result.type == ResultType.Skip: + is_unexpected = test_name in test_run_results.unexpected_results_by_name + merged_results_by_name[test_name].append((result, +@@ -213,7 +213,7 @@ def summarize_results(port_obj, + + # Finally, compute the tests dict. + tests = {} +- for test_name, merged_results in merged_results_by_name.iteritems(): ++ for test_name, merged_results in merged_results_by_name.items(): + initial_result = merged_results[0][0] + + if only_include_failing and initial_result.type == ResultType.Skip: +@@ -341,7 +341,7 @@ def summarize_results(port_obj, + + for test_result, _ in merged_results: + for artifact_name, artifacts in \ +- test_result.artifacts.artifacts.items(): ++ list(test_result.artifacts.artifacts.items()): + artifact_dict = test_dict.setdefault('artifacts', {}) + artifact_dict.setdefault(artifact_name, []).extend(artifacts) + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_run_results_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_run_results_unittest.py +index 64e7667bd..86f3684dd 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_run_results_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/models/test_run_results_unittest.py +@@ -281,7 +281,7 @@ class SummarizedResultsTest(unittest.TestCase): + def test_num_failures_by_type(self): + summary = summarized_results( + self.port, expected=False, passing=False, flaky=False) +- self.assertEquals(summary['num_failures_by_type'], { ++ self.assertEqual(summary['num_failures_by_type'], { + 'CRASH': 1, + 'PASS': 1, + 'SKIP': 0, +@@ -291,7 +291,7 @@ class SummarizedResultsTest(unittest.TestCase): + + summary = summarized_results( + self.port, expected=True, passing=False, flaky=False) +- 
self.assertEquals(summary['num_failures_by_type'], { ++ self.assertEqual(summary['num_failures_by_type'], { + 'CRASH': 1, + 'PASS': 1, + 'SKIP': 0, +@@ -301,7 +301,7 @@ class SummarizedResultsTest(unittest.TestCase): + + summary = summarized_results( + self.port, expected=False, passing=True, flaky=False) +- self.assertEquals(summary['num_failures_by_type'], { ++ self.assertEqual(summary['num_failures_by_type'], { + 'CRASH': 0, + 'PASS': 5, + 'SKIP': 1, +@@ -313,13 +313,13 @@ class SummarizedResultsTest(unittest.TestCase): + self.port._options.builder_name = 'dummy builder' + summary = summarized_results( + self.port, expected=False, passing=False, flaky=False) +- self.assertNotEquals(summary['chromium_revision'], '') ++ self.assertNotEqual(summary['chromium_revision'], '') + + def test_bug_entry(self): + self.port._options.builder_name = 'dummy builder' + summary = summarized_results( + self.port, expected=False, passing=True, flaky=False) +- self.assertEquals( ++ self.assertEqual( + summary['tests']['passes']['skipped']['skip.html']['bugs'], + ['crbug.com/123']) + +@@ -331,14 +331,14 @@ class SummarizedResultsTest(unittest.TestCase): + passing=True, + flaky=False, + extra_skipped_tests=['passes/text.html']) +- self.assertEquals(summary['tests']['passes']['text.html']['expected'], ++ self.assertEqual(summary['tests']['passes']['text.html']['expected'], + 'SKIP PASS') + + def test_summarized_results_wontfix(self): + self.port._options.builder_name = 'dummy builder' + summary = summarized_results( + self.port, expected=False, passing=False, flaky=False) +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['keyboard.html'] + ['expected'], 'SKIP CRASH') + self.assertTrue( +@@ -378,7 +378,7 @@ class SummarizedResultsTest(unittest.TestCase): + self.port._options.builder_name = 'dummy builder' + summary = summarized_results( + self.port, expected=False, passing=True, flaky=False) +- self.assertEquals( ++ self.assertEqual( + 
summary['tests']['passes']['skipped']['skip.html']['expected'], + 'SKIP') + +@@ -398,10 +398,10 @@ class SummarizedResultsTest(unittest.TestCase): + def test_rounded_run_times(self): + summary = summarized_results( + self.port, expected=False, passing=False, flaky=False) +- self.assertEquals(summary['tests']['passes']['text.html']['time'], 1) ++ self.assertEqual(summary['tests']['passes']['text.html']['time'], 1) + self.assertTrue('time' not in summary['tests']['failures']['expected'] + ['audio.html']) +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['timeout.html']['time'], + 0.1) + self.assertTrue('time' not in summary['tests']['failures']['expected'] +@@ -433,70 +433,70 @@ class SummarizedResultsTest(unittest.TestCase): + self.port, expectations, initial_results, all_retry_results) + self.assertIn('is_unexpected', + summary['tests']['failures']['expected']['text.html']) +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['text.html']['expected'], + 'FAIL') +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['text.html']['actual'], + 'TIMEOUT FAIL PASS PASS') +- self.assertEquals(summary['num_passes'], 1) +- self.assertEquals(summary['num_regressions'], 0) +- self.assertEquals(summary['num_flaky'], 0) ++ self.assertEqual(summary['num_passes'], 1) ++ self.assertEqual(summary['num_regressions'], 0) ++ self.assertEqual(summary['num_flaky'], 0) + + def test_summarized_results_flaky(self): + summary = summarized_results( + self.port, expected=False, passing=False, flaky=True) + +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['crash.html']['expected'], + 'CRASH') +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['crash.html']['actual'], + 'TIMEOUT FAIL CRASH FAIL') + + self.assertTrue( + 'is_unexpected' not in summary['tests']['passes']['text.html']) +- 
self.assertEquals(summary['tests']['passes']['text.html']['expected'], ++ self.assertEqual(summary['tests']['passes']['text.html']['expected'], + 'PASS') +- self.assertEquals(summary['tests']['passes']['text.html']['actual'], ++ self.assertEqual(summary['tests']['passes']['text.html']['actual'], + 'TIMEOUT PASS PASS PASS') + + self.assertTrue(summary['tests']['failures']['expected'] + ['timeout.html']['is_unexpected']) +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['timeout.html'] + ['expected'], 'TIMEOUT') +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['timeout.html']['actual'], + 'FAIL FAIL FAIL FAIL') + + self.assertTrue('is_unexpected' not in summary['tests']['failures'] + ['expected']['leak.html']) +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['leak.html']['expected'], + 'FAIL') +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['leak.html']['actual'], + 'TIMEOUT FAIL FAIL FAIL') + + self.assertTrue('is_unexpected' not in summary['tests']['failures'] + ['expected']['audio.html']) +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['audio.html']['expected'], + 'FAIL') +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['audio.html']['actual'], + 'CRASH FAIL FAIL FAIL') + +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['text.html']['expected'], + 'FAIL') + self.assertTrue('is_unexpected' not in summary['tests']['failures'] + ['expected']['text.html']) + +- self.assertEquals(summary['num_flaky'], 6) +- self.assertEquals(summary['num_passes'], 1) # keyboard.html +- self.assertEquals(summary['num_regressions'], 0) ++ self.assertEqual(summary['num_flaky'], 6) ++ self.assertEqual(summary['num_passes'], 1) # keyboard.html ++ self.assertEqual(summary['num_regressions'], 0) + + def 
test_summarized_results_flaky_pass_after_first_retry(self): + test_name = 'passes/text.html' +@@ -520,13 +520,13 @@ class SummarizedResultsTest(unittest.TestCase): + self.port, expectations, initial_results, all_retry_results) + self.assertTrue( + 'is_unexpected' not in summary['tests']['passes']['text.html']) +- self.assertEquals(summary['tests']['passes']['text.html']['expected'], ++ self.assertEqual(summary['tests']['passes']['text.html']['expected'], + 'PASS') +- self.assertEquals(summary['tests']['passes']['text.html']['actual'], ++ self.assertEqual(summary['tests']['passes']['text.html']['actual'], + 'CRASH TIMEOUT PASS PASS') +- self.assertEquals(summary['num_flaky'], 1) +- self.assertEquals(summary['num_passes'], 0) +- self.assertEquals(summary['num_regressions'], 0) ++ self.assertEqual(summary['num_flaky'], 1) ++ self.assertEqual(summary['num_passes'], 0) ++ self.assertEqual(summary['num_regressions'], 0) + + def test_summarized_results_with_iterations(self): + test_name = 'passes/text.html' +@@ -549,13 +549,13 @@ class SummarizedResultsTest(unittest.TestCase): + + summary = test_run_results.summarize_results( + self.port, expectations, initial_results, all_retry_results) +- self.assertEquals(summary['tests']['passes']['text.html']['expected'], ++ self.assertEqual(summary['tests']['passes']['text.html']['expected'], + 'PASS') +- self.assertEquals(summary['tests']['passes']['text.html']['actual'], ++ self.assertEqual(summary['tests']['passes']['text.html']['actual'], + 'CRASH FAIL TIMEOUT FAIL FAIL') +- self.assertEquals(summary['num_flaky'], 0) +- self.assertEquals(summary['num_passes'], 0) +- self.assertEquals(summary['num_regressions'], 1) ++ self.assertEqual(summary['num_flaky'], 0) ++ self.assertEqual(summary['num_passes'], 0) ++ self.assertEqual(summary['num_regressions'], 1) + + def test_summarized_results_regression(self): + summary = summarized_results( +@@ -563,48 +563,48 @@ class SummarizedResultsTest(unittest.TestCase): + + 
self.assertTrue(summary['tests']['failures']['expected'] + ['timeout.html']['is_unexpected']) +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['timeout.html'] + ['expected'], 'TIMEOUT') +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['timeout.html']['actual'], + 'FAIL FAIL CRASH FAIL') + + self.assertTrue( + summary['tests']['passes']['text.html']['is_unexpected']) +- self.assertEquals(summary['tests']['passes']['text.html']['expected'], ++ self.assertEqual(summary['tests']['passes']['text.html']['expected'], + 'PASS') +- self.assertEquals(summary['tests']['passes']['text.html']['actual'], ++ self.assertEqual(summary['tests']['passes']['text.html']['actual'], + 'TIMEOUT TIMEOUT TIMEOUT TIMEOUT') + + self.assertTrue(summary['tests']['failures']['expected']['crash.html'] + ['is_unexpected']) +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['crash.html']['expected'], + 'CRASH') +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['crash.html']['actual'], + 'TIMEOUT TIMEOUT TIMEOUT TIMEOUT') + + self.assertTrue(summary['tests']['failures']['expected']['leak.html'] + ['is_unexpected']) +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['leak.html']['expected'], + 'FAIL') +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['leak.html']['actual'], + 'TIMEOUT TIMEOUT TIMEOUT TIMEOUT') + +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['audio.html']['expected'], + 'FAIL') +- self.assertEquals( ++ self.assertEqual( + summary['tests']['failures']['expected']['audio.html']['actual'], + 'CRASH FAIL FAIL FAIL') + +- self.assertEquals(summary['num_regressions'], 6) +- self.assertEquals(summary['num_passes'], 1) # keyboard.html +- self.assertEquals(summary['num_flaky'], 0) ++ self.assertEqual(summary['num_regressions'], 6) ++ 
self.assertEqual(summary['num_passes'], 1) # keyboard.html ++ self.assertEqual(summary['num_flaky'], 0) + + def test_results_contains_path_delimiter(self): + summary = summarized_results( +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/android_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/android_unittest.py +index 966815858..a0c0f9847 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/android_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/android_unittest.py +@@ -117,8 +117,8 @@ class AndroidPortTest(port_testcase.PortTestCase): + 'adb_devices': ['123456789ABCDEF9'] + })) + +- self.assertEquals(6, port_default.default_child_processes()) +- self.assertEquals(1, port_fixed_device.default_child_processes()) ++ self.assertEqual(6, port_default.default_child_processes()) ++ self.assertEqual(1, port_fixed_device.default_child_processes()) + + def test_no_bot_expectations_searched(self): + # We don't support bot expectations at the moment +@@ -189,7 +189,7 @@ class ChromiumAndroidDriverTest(unittest.TestCase): + + # The cmd_line() method in the Android port is used for starting a shell, not the test runner. + def test_cmd_line(self): +- self.assertEquals(['adb', '-s', '123456789ABCDEF0', 'shell'], ++ self.assertEqual(['adb', '-s', '123456789ABCDEF0', 'shell'], + self._driver.cmd_line([])) + + # Test that the Chromium Android port can interpret Android's shell output. 
+diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/base.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/base.py +index bb65e8aaf..ecc7cf660 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/base.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/base.py +@@ -309,7 +309,7 @@ class Port(object): + if name in configs: + raise ValueError('{} contains duplicated name {}.'.format( + config_file, name)) +- if args in configs.itervalues(): ++ if args in iter(configs.values()): + raise ValueError( + '{}: name "{}" has the same args as another entry.'.format( + config_file, name)) +@@ -440,8 +440,8 @@ class Port(object): + + The directories are searched in order. + """ +- return map(self._absolute_baseline_path, +- self.FALLBACK_PATHS[self.version()]) ++ return list(map(self._absolute_baseline_path, ++ self.FALLBACK_PATHS[self.version()])) + + @memoized + def _compare_baseline(self): +@@ -1635,7 +1635,7 @@ class Port(object): + the --additional-expectations flag is passed; those aren't included + here. + """ +- return filter(None, [ ++ return [_f for _f in [ + self.path_to_generic_test_expectations_file(), + self.path_to_webdriver_expectations_file(), + self._filesystem.join(self.web_tests_dir(), 'NeverFixTests'), +@@ -1643,7 +1643,7 @@ class Port(object): + 'StaleTestExpectations'), + self._filesystem.join(self.web_tests_dir(), 'SlowTests'), + self._flag_specific_expectations_path() +- ]) ++ ] if _f] + + def extra_expectations_files(self): + """Returns a list of paths to test expectations not loaded by default. +@@ -1795,15 +1795,15 @@ class Port(object): + + # We require stdout and stderr to be bytestrings, not character strings. 
+ if stdout: +- assert isinstance(stdout, basestring) ++ assert isinstance(stdout, str) + stdout_lines = stdout.decode('utf8', 'replace').splitlines() + else: +- stdout_lines = [u''] ++ stdout_lines = [''] + if stderr: +- assert isinstance(stderr, basestring) ++ assert isinstance(stderr, str) + stderr_lines = stderr.decode('utf8', 'replace').splitlines() + else: +- stderr_lines = [u''] ++ stderr_lines = [''] + + return (stderr, 'crash log for %s (pid %s):\n%s\n%s\n' % + (name_str, pid_str, '\n'.join( +@@ -1875,8 +1875,7 @@ class Port(object): + # maps then this could be more efficient. + if suite.bases: + tests.extend( +- map(lambda x: suite.full_prefix + x, +- self.real_tests(suite.bases))) ++ [suite.full_prefix + x for x in self.real_tests(suite.bases)]) + + if suite_paths: + tests.extend( +@@ -1920,7 +1919,7 @@ class Port(object): + + tests = [] + tests.extend( +- map(lambda x: suite.full_prefix + x, self.real_tests(bases))) ++ [suite.full_prefix + x for x in self.real_tests(bases)]) + + wpt_bases = [] + for base in bases: +@@ -1981,7 +1980,7 @@ class Port(object): + # This walks through the set of paths where we should look for tests. + # For each path, a map can be provided that we replace 'path' with in + # the result. +- for filter_path, virtual_prefix in itertools.izip_longest( ++ for filter_path, virtual_prefix in itertools.zip_longest( + filter_paths, virtual_prefixes): + # This is to make sure "external[\\/]?" can also match to + # external/wpt. 
+diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/base_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/base_unittest.py +index 908bdab32..ccbc0e0a2 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/base_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/base_unittest.py +@@ -61,24 +61,24 @@ class PortTest(LoggingTestCase): + + def test_validate_wpt_dirs(self): + # Keys should not have trailing slashes. +- for wpt_path in Port.WPT_DIRS.keys(): ++ for wpt_path in list(Port.WPT_DIRS.keys()): + self.assertFalse(wpt_path.endswith('/')) + # Values should not be empty (except the last one). +- for url_prefix in Port.WPT_DIRS.values()[:-1]: ++ for url_prefix in list(Port.WPT_DIRS.values())[:-1]: + self.assertNotEqual(url_prefix, '/') +- self.assertEqual(Port.WPT_DIRS.values()[-1], '/') ++ self.assertEqual(list(Port.WPT_DIRS.values())[-1], '/') + + def test_validate_wpt_regex(self): +- self.assertEquals( ++ self.assertEqual( + Port.WPT_REGEX.match('external/wpt/foo/bar.html').groups(), + ('external/wpt', 'foo/bar.html')) +- self.assertEquals( ++ self.assertEqual( + Port.WPT_REGEX.match('virtual/test/external/wpt/foo/bar.html'). + groups(), ('external/wpt', 'foo/bar.html')) +- self.assertEquals( ++ self.assertEqual( + Port.WPT_REGEX.match('wpt_internal/foo/bar.html').groups(), + ('wpt_internal', 'foo/bar.html')) +- self.assertEquals( ++ self.assertEqual( + Port.WPT_REGEX.match('virtual/test/wpt_internal/foo/bar.html'). 
+ groups(), ('wpt_internal', 'foo/bar.html')) + +@@ -224,7 +224,7 @@ class PortTest(LoggingTestCase): + self.assertEqual( + port.expected_filename(test_file, '.txt', return_default=False), + MOCK_WEB_TESTS + 'platform/foo/fast/test-expected.txt') +- self.assertEquals( ++ self.assertEqual( + port.fallback_expected_filename(test_file, '.txt'), + MOCK_WEB_TESTS + 'fast/test-expected.txt') + port.host.filesystem.remove(MOCK_WEB_TESTS + 'fast/test-expected.txt') +@@ -461,7 +461,7 @@ class PortTest(LoggingTestCase): + MOCK_WEB_TESTS + 'platform/nonexistant/TestExpectations'] + port.host.filesystem.write_text_file( + MOCK_WEB_TESTS + 'platform/exists/TestExpectations', '') +- self.assertEqual('\n'.join(port.expectations_dict().keys()), ++ self.assertEqual('\n'.join(list(port.expectations_dict().keys())), + MOCK_WEB_TESTS + 'platform/exists/TestExpectations') + + def _make_port_for_test_additional_expectations(self, options_dict={}): +@@ -479,13 +479,13 @@ class PortTest(LoggingTestCase): + + def test_additional_expectations_empty(self): + port = self._make_port_for_test_additional_expectations() +- self.assertEqual(port.expectations_dict().values(), []) ++ self.assertEqual(list(port.expectations_dict().values()), []) + + def test_additional_expectations_1(self): + port = self._make_port_for_test_additional_expectations({ + 'additional_expectations': ['/tmp/additional-expectations-1.txt'] + }) +- self.assertEqual(port.expectations_dict().values(), ['content1\n']) ++ self.assertEqual(list(port.expectations_dict().values()), ['content1\n']) + + def test_additional_expectations_2(self): + port = self._make_port_for_test_additional_expectations({ +@@ -494,7 +494,7 @@ class PortTest(LoggingTestCase): + '/tmp/additional-expectations-2.txt' + ] + }) +- self.assertEqual(port.expectations_dict().values(), ++ self.assertEqual(list(port.expectations_dict().values()), + ['content1\n', 'content2\n']) + + def test_additional_expectations_additional_flag(self): +@@ -505,7 +505,7 @@ class 
PortTest(LoggingTestCase): + ], + 'additional_driver_flag': ['--special-flag'] + }) +- self.assertEqual(port.expectations_dict().values(), ++ self.assertEqual(list(port.expectations_dict().values()), + ['content3', 'content1\n', 'content2\n']) + + def test_flag_specific_expectations(self): +@@ -517,7 +517,7 @@ class PortTest(LoggingTestCase): + port.host.filesystem.write_text_file( + MOCK_WEB_TESTS + 'FlagExpectations/README.txt', 'cc') + +- self.assertEqual(port.expectations_dict().values(), []) ++ self.assertEqual(list(port.expectations_dict().values()), []) + # all_expectations_dict() is an OrderedDict, but its order depends on + # file system walking order. + self.assertEqual( +@@ -1413,21 +1413,21 @@ class PortTest(LoggingTestCase): + + tests = port.tests( + ['virtual/virtual_passes/passes/test-virtual-passes.html']) +- self.assertEquals( ++ self.assertEqual( + ['virtual/virtual_passes/passes/test-virtual-passes.html'], tests) + + tests = port.tests(['virtual/virtual_empty_bases']) +- self.assertEquals([ ++ self.assertEqual([ + 'virtual/virtual_empty_bases/physical1.html', + 'virtual/virtual_empty_bases/dir/physical2.html' + ], tests) + + tests = port.tests(['virtual/virtual_empty_bases/dir']) +- self.assertEquals(['virtual/virtual_empty_bases/dir/physical2.html'], ++ self.assertEqual(['virtual/virtual_empty_bases/dir/physical2.html'], + tests) + + tests = port.tests(['virtual/virtual_empty_bases/dir/physical2.html']) +- self.assertEquals(['virtual/virtual_empty_bases/dir/physical2.html'], ++ self.assertEqual(['virtual/virtual_empty_bases/dir/physical2.html'], + tests) + + def test_build_path(self): +@@ -1732,7 +1732,7 @@ class PortTest(LoggingTestCase): + all_systems.append(system[0]) + all_systems.sort() + configuration_specifier_macros = [] +- for macros in Port.CONFIGURATION_SPECIFIER_MACROS.values(): ++ for macros in list(Port.CONFIGURATION_SPECIFIER_MACROS.values()): + configuration_specifier_macros += macros + configuration_specifier_macros.sort() + 
self.assertListEqual(all_systems, configuration_specifier_macros) +@@ -1740,7 +1740,7 @@ class PortTest(LoggingTestCase): + def test_configuration_specifier_macros(self): + # CONFIGURATION_SPECIFIER_MACROS should contain all SUPPORTED_VERSIONS + # of each port. Must use real Port classes in this test. +- for port_name, versions in Port.CONFIGURATION_SPECIFIER_MACROS.items(): ++ for port_name, versions in list(Port.CONFIGURATION_SPECIFIER_MACROS.items()): + port_class, _ = PortFactory.get_port_class(port_name) + self.assertIsNotNone(port_class, port_name) + self.assertListEqual(versions, list(port_class.SUPPORTED_VERSIONS)) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/driver.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/driver.py +index 0d5901289..0e9861241 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/driver.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/driver.py +@@ -346,7 +346,7 @@ class Driver(object): + self._port.abspath_for_test(test_name)) + + if using_wptserve: +- for wpt_path, url_prefix in self.WPT_DIRS.items(): ++ for wpt_path, url_prefix in list(self.WPT_DIRS.items()): + # The keys of WPT_DIRS do not have trailing slashes. 
+ wpt_path += '/' + if test_name.startswith(wpt_path): +@@ -400,7 +400,7 @@ class Driver(object): + for prefix in self._get_uri_prefixes(*self.WPT_HOST_AND_PORTS): + if uri.startswith(prefix): + url_path = '/' + uri[len(prefix):] +- for wpt_path, url_prefix in self.WPT_DIRS.items(): ++ for wpt_path, url_prefix in list(self.WPT_DIRS.items()): + if url_path.startswith(url_prefix): + return wpt_path + '/' + url_path[len(url_prefix):] + raise NotImplementedError('unknown url type: %s' % uri) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/driver_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/driver_unittest.py +index ef67ba7b2..991fd37fd 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/driver_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/driver_unittest.py +@@ -332,7 +332,7 @@ class CoalesceRepeatedSwitchesTest(unittest.TestCase): + def _assert_coalesced_switches(self, input_switches, + expected_coalesced_switches): + output_switches = coalesce_repeated_switches(input_switches) +- self.assertEquals(output_switches, expected_coalesced_switches) ++ self.assertEqual(output_switches, expected_coalesced_switches) + + def test_no_dupes(self): + self._assert_coalesced_switches(['--a', '--b', '--c'], +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/factory_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/factory_unittest.py +index 849d042cf..93620d0f2 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/factory_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/factory_unittest.py +@@ -105,7 +105,7 @@ class FactoryTest(unittest.TestCase): + host = MockHost() + finder = PathFinder(host.filesystem) + files = files or {} +- for path, contents in files.items(): ++ for path, contents in 
list(files.items()): + host.filesystem.write_text_file( + finder.path_from_chromium_base(path), contents) + options = optparse.Values({ +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/fuchsia.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/fuchsia.py +index 9feb12439..199bee3dd 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/fuchsia.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/fuchsia.py +@@ -374,7 +374,7 @@ class FuchsiaServerProcess(server_process.ServerProcess): + listen_socket.listen(1) + stdin_port = listen_socket.getsockname()[1] + +- command = ['%s=%s' % (k, v) for k, v in self._env.items()] + \ ++ command = ['%s=%s' % (k, v) for k, v in list(self._env.items())] + \ + self._cmd + \ + ['--no-sandbox', '--stdin-redirect=%s:%s' % + (qemu_target.HOST_IP_ADDRESS, stdin_port)] +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/port_testcase.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/port_testcase.py +index 8528ce7d9..ef035cff3 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/port_testcase.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/port_testcase.py +@@ -268,9 +268,9 @@ class PortTestCase(LoggingTestCase): + 'foo', 1234, 'foo\xa6bar', 'foo\xa6bar', newer_than=None) + self.assertEqual(stderr, 'foo\xa6bar') + self.assertEqual( +- details, u'crash log for foo (pid 1234):\n' +- u'STDOUT: foo\ufffdbar\n' +- u'STDERR: foo\ufffdbar\n') ++ details, 'crash log for foo (pid 1234):\n' ++ 'STDOUT: foo\ufffdbar\n' ++ 'STDERR: foo\ufffdbar\n') + self.assertIsNone(crash_site) + + def test_get_crash_log_newer_than(self): +@@ -279,9 +279,9 @@ class PortTestCase(LoggingTestCase): + 'foo', 1234, 'foo\xa6bar', 'foo\xa6bar', newer_than=1.0) + self.assertEqual(stderr, 'foo\xa6bar') + self.assertEqual( +- details, u'crash log for 
foo (pid 1234):\n' +- u'STDOUT: foo\ufffdbar\n' +- u'STDERR: foo\ufffdbar\n') ++ details, 'crash log for foo (pid 1234):\n' ++ 'STDOUT: foo\ufffdbar\n' ++ 'STDERR: foo\ufffdbar\n') + self.assertIsNone(crash_site) + + def test_get_crash_log_crash_site(self): +@@ -317,7 +317,7 @@ class PortTestCase(LoggingTestCase): + port.host.filesystem.write_text_file(path, '') + ordered_dict = port.expectations_dict() + self.assertEqual(port.path_to_generic_test_expectations_file(), +- ordered_dict.keys()[0]) ++ list(ordered_dict.keys())[0]) + + options = optparse.Values( + dict(additional_expectations=['/tmp/foo', '/tmp/bar'])) +@@ -327,9 +327,9 @@ class PortTestCase(LoggingTestCase): + port.host.filesystem.write_text_file('/tmp/foo', 'foo') + port.host.filesystem.write_text_file('/tmp/bar', 'bar') + ordered_dict = port.expectations_dict() +- self.assertEqual(ordered_dict.keys()[-2:], ++ self.assertEqual(list(ordered_dict.keys())[-2:], + options.additional_expectations) +- self.assertEqual(ordered_dict.values()[-2:], ['foo', 'bar']) ++ self.assertEqual(list(ordered_dict.values())[-2:], ['foo', 'bar']) + + def test_path_to_apache_config_file(self): + # Specific behavior may vary by port, so unit test sub-classes may override this. 
+diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/server_process.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/server_process.py +index ffe637f03..31dbbeae6 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/server_process.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/server_process.py +@@ -134,7 +134,7 @@ class ServerProcess(object): + env_str = '' + if self._env: + env_str += '\n'.join('%s=%s' % (k, v) +- for k, v in self._env.items()) + '\n' ++ for k, v in list(self._env.items())) + '\n' + _log.info('CMD: \n%s%s\n', env_str, _quote_cmd(self._cmd)) + proc = self._host.executive.popen( + self._cmd, +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/test.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/test.py +index a47ba7d34..712911786 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/test.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/test.py +@@ -83,7 +83,7 @@ class TestList(object): + + def add(self, name, **kwargs): + test = TestInstance(name) +- for key, value in kwargs.items(): ++ for key, value in list(kwargs.items()): + test.__dict__[key] = value + self.tests[name] = test + +@@ -123,7 +123,7 @@ class TestList(object): + reference_name, actual_checksum='diff', actual_image='DIFF') + + def keys(self): +- return self.tests.keys() ++ return list(self.tests.keys()) + + def __contains__(self, item): + return item in self.tests +@@ -506,7 +506,7 @@ passes/slow.html [ Slow ] + + # Add each test and the expected output, if any. 
+ test_list = unit_test_list() +- for test in test_list.tests.values(): ++ for test in list(test_list.tests.values()): + add_file(test, test.name[test.name.rfind('.'):], '') + if test.expected_audio: + add_file(test, '-expected.wav', test.expected_audio) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/win.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/win.py +index 1253a09a1..74db0c74d 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/win.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/port/win.py +@@ -34,7 +34,7 @@ import tempfile + # The _winreg library is only available on Windows. + # https://docs.python.org/2/library/_winreg.html + try: +- import _winreg # pylint: disable=import-error ++ import winreg # pylint: disable=import-error + except ImportError: + _winreg = None # pylint: disable=invalid-name + +@@ -114,9 +114,9 @@ class WinPort(base.Port): + # Note that we HKCR is a union of HKLM and HKCR (with the latter + # overriding the former), so reading from HKCR ensures that we get + # the value if it is set in either place. See als comments below. +- hkey = _winreg.OpenKey(_winreg.HKEY_CLASSES_ROOT, sub_key) +- args = _winreg.QueryValue(hkey, '').split() +- _winreg.CloseKey(hkey) ++ hkey = winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, sub_key) ++ args = winreg.QueryValue(hkey, '').split() ++ winreg.CloseKey(hkey) + + # In order to keep multiple checkouts from stepping on each other, we simply check that an + # existing entry points to a valid path and has the right command line. +@@ -132,11 +132,11 @@ class WinPort(base.Port): + # to the registry, and that will get reflected in HKCR when it is read, above. 
+ cmdline = self._path_from_chromium_base('third_party', 'perl', 'perl', + 'bin', 'perl.exe') + ' -wT' +- hkey = _winreg.CreateKeyEx(_winreg.HKEY_CURRENT_USER, ++ hkey = winreg.CreateKeyEx(winreg.HKEY_CURRENT_USER, + 'Software\\Classes\\' + sub_key, 0, +- _winreg.KEY_WRITE) +- _winreg.SetValue(hkey, '', _winreg.REG_SZ, cmdline) +- _winreg.CloseKey(hkey) ++ winreg.KEY_WRITE) ++ winreg.SetValue(hkey, '', winreg.REG_SZ, cmdline) ++ winreg.CloseKey(hkey) + return True + + def setup_environ_for_server(self): +@@ -150,7 +150,7 @@ class WinPort(base.Port): + self.host.environ['TMP'] = self.host.environ['TEMP'] + env = super(WinPort, self).setup_environ_for_server() + apache_envvars = ['SYSTEMDRIVE', 'SYSTEMROOT', 'TEMP', 'TMP'] +- for key, value in self.host.environ.copy().items(): ++ for key, value in list(self.host.environ.copy().items()): + if key not in env and key in apache_envvars: + env[key] = value + return env +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/run_web_tests_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/run_web_tests_unittest.py +index 534725434..9b14ee6aa 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/run_web_tests_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/run_web_tests_unittest.py +@@ -31,7 +31,7 @@ + import json + import os + import re +-import StringIO ++import io + import sys + import unittest + +@@ -84,7 +84,7 @@ def passing_run(extra_args=None, + if shared_port: + port_obj.host.port_factory.get = lambda *args, **kwargs: port_obj + +- printer = Printer(host, options, StringIO.StringIO()) ++ printer = Printer(host, options, io.StringIO()) + run_details = run_web_tests.run(port_obj, options, parsed_args, printer) + return run_details.exit_code == 0 + +@@ -109,7 +109,7 @@ def logging_run(extra_args=None, + def run_and_capture(port_obj, options, parsed_args, shared_port=True): + if shared_port: + 
port_obj.host.port_factory.get = lambda *args, **kwargs: port_obj +- logging_stream = StringIO.StringIO() ++ logging_stream = io.StringIO() + printer = Printer(port_obj.host, options, logging_stream) + run_details = run_web_tests.run(port_obj, options, parsed_args, printer) + return (run_details, logging_stream) +@@ -142,7 +142,7 @@ def get_test_results(args, host=None, port_obj=None): + port_obj = port_obj or host.port_factory.get( + port_name=options.platform, options=options) + +- printer = Printer(host, options, StringIO.StringIO()) ++ printer = Printer(host, options, io.StringIO()) + run_details = run_web_tests.run(port_obj, options, parsed_args, printer) + + all_results = [] +@@ -183,7 +183,7 @@ class RunTest(unittest.TestCase, StreamTestingMixin): + '/tmp/json_failing_test_results.json' + ], + tests_included=True) +- logging_stream = StringIO.StringIO() ++ logging_stream = io.StringIO() + host = MockHost() + port_obj = host.port_factory.get(options.platform, options) + printer = Printer(host, options, logging_stream) +@@ -1842,14 +1842,14 @@ class RunTest(unittest.TestCase, StreamTestingMixin): + run_details, err, _ = logging_run( + ['passes/args.html', 'virtual/passes/'], tests_included=True) + self.assertEqual( +- len(run_details.summarized_full_results['tests']['passes'].keys()), ++ len(list(run_details.summarized_full_results['tests']['passes'].keys())), + 1) + self.assertFalse(virtual_test_warning_msg in err.getvalue()) + + run_details, err, _ = logging_run( + ['passes/args.html', 'virtual/passes/*'], tests_included=True) + self.assertEqual( +- len(run_details.summarized_full_results['tests']['passes'].keys()), ++ len(list(run_details.summarized_full_results['tests']['passes'].keys())), + 1) + self.assertTrue(virtual_test_warning_msg in err.getvalue()) + +@@ -2178,16 +2178,16 @@ class RunTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertTrue( +- any(path.endswith('-diff.txt') for path 
in written_files.keys())) ++ any(path.endswith('-diff.txt') for path in list(written_files.keys()))) + self.assertTrue( + any( + path.endswith('-pretty-diff.html') +- for path in written_files.keys())) ++ for path in list(written_files.keys()))) + self.assertFalse( + any(path.endswith('-wdiff.html') for path in written_files)) + + def test_unsupported_platform(self): +- stderr = StringIO.StringIO() ++ stderr = io.StringIO() + res = run_web_tests.main(['--platform', 'foo'], stderr) + + self.assertEqual(res, exit_codes.UNEXPECTED_ERROR_EXIT_STATUS) +@@ -2207,7 +2207,7 @@ class RunTest(unittest.TestCase, StreamTestingMixin): + host = MockHost() + port_obj = host.port_factory.get( + port_name=options.platform, options=options) +- logging_stream = StringIO.StringIO() ++ logging_stream = io.StringIO() + printer = Printer(host, options, logging_stream) + run_web_tests.run(port_obj, options, parsed_args, printer) + self.assertTrue('text.html passed' in logging_stream.getvalue()) +@@ -2326,7 +2326,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + # The run exit code is 0, indicating success; since we're resetting + # baselines, it's OK for actual results to not match baselines. 
+ self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 7) ++ self.assertEqual(len(list(written_files.keys())), 7) + self.assert_wpt_manifests_not_written(host, written_files) + self.assert_baselines( + written_files, +@@ -2365,7 +2365,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 8) ++ self.assertEqual(len(list(written_files.keys())), 8) + self.assert_baselines(written_files, log_stream, + 'failures/unexpected/missing_text', ['.txt']) + self.assert_baselines(written_files, log_stream, +@@ -2388,7 +2388,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 6) ++ self.assertEqual(len(list(written_files.keys())), 6) + self.assert_baselines(written_files, log_stream, + 'failures/unexpected/testharness', ['.txt']) + self.assert_baselines(written_files, log_stream, 'passes/testharness', +@@ -2406,7 +2406,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + self.assertEqual(details.exit_code, 0) + written_files = host.filesystem.written_files +- self.assertEqual(len(written_files.keys()), 6) ++ self.assertEqual(len(list(written_files.keys())), 6) + self.assert_baselines(written_files, log_stream, + 'failures/unexpected/testharness', ['.txt']) + +@@ -2421,7 +2421,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + self.assertEqual(details.exit_code, 0) + written_files = host.filesystem.written_files +- self.assertEqual(len(written_files.keys()), 6) ++ self.assertEqual(len(list(written_files.keys())), 6) + self.assert_baselines(written_files, log_stream, + 'failures/unexpected/image-only', ['.png']) + +@@ -2443,7 +2443,7 @@ class 
RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 1) +- self.assertEqual(len(written_files.keys()), 11) ++ self.assertEqual(len(list(written_files.keys())), 11) + self.assert_contains( + log_stream, + 'Copying baseline to "platform/test-mac-mac10.10/failures/unexpected/text-image-checksum-expected.png"' +@@ -2472,7 +2472,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 7) ++ self.assertEqual(len(list(written_files.keys())), 7) + self.assert_baselines( + written_files, + log_stream, +@@ -2489,7 +2489,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 5) ++ self.assertEqual(len(list(written_files.keys())), 5) + self.assert_baselines( + written_files, + log_stream, +@@ -2508,7 +2508,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 6) ++ self.assertEqual(len(list(written_files.keys())), 6) + self.assert_baselines( + written_files, + log_stream, +@@ -2527,7 +2527,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 8) ++ self.assertEqual(len(list(written_files.keys())), 8) + self.assertIsNone(written_files[extra_txt]) + self.assertIsNone(written_files[extra_wav]) + self.assert_baselines( +@@ -2549,7 +2549,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = 
host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 8) ++ self.assertEqual(len(list(written_files.keys())), 8) + self.assertIsNone(written_files[extra_png]) + self.assertIsNone(written_files[extra_wav]) + self.assertIsNone(written_files[extra_txt]) +@@ -2566,7 +2566,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 7) ++ self.assertEqual(len(list(written_files.keys())), 7) + self.assertIsNone(written_files[extra_png]) + self.assertIsNone(written_files[extra_wav]) + self.assertNotIn( +@@ -2585,7 +2585,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 7) ++ self.assertEqual(len(list(written_files.keys())), 7) + self.assertIsNone(written_files[extra_png]) + self.assertIsNone(written_files[extra_txt]) + self.assert_baselines( +@@ -2602,7 +2602,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 6) ++ self.assertEqual(len(list(written_files.keys())), 6) + self.assert_baselines( + written_files, + log_stream, +@@ -2628,7 +2628,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 7) ++ self.assertEqual(len(list(written_files.keys())), 7) + # We should create new image baseline only. 
+ self.assert_baselines( + written_files, +@@ -2655,7 +2655,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 1) +- self.assertEqual(len(written_files.keys()), 11) ++ self.assertEqual(len(list(written_files.keys())), 11) + self.assert_contains( + log_stream, + 'Copying baseline to "flag-specific/flag/failures/unexpected/text-image-checksum-expected.png"' +@@ -2694,7 +2694,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + self.assertEqual(details.exit_code, 0) + self.assertFalse(host.filesystem.exists(flag_specific_baseline_txt)) + written_files = host.filesystem.written_files +- self.assertEqual(len(written_files.keys()), 8) ++ self.assertEqual(len(list(written_files.keys())), 8) + # We should create new image baseline only. + self.assert_baselines( + written_files, +@@ -2721,7 +2721,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 7) ++ self.assertEqual(len(list(written_files.keys())), 7) + # We should create new image baseline only. + self.assert_baselines( + written_files, +@@ -2746,7 +2746,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 7) ++ self.assertEqual(len(list(written_files.keys())), 7) + # We should reset the platform image baseline. 
+ self.assert_baselines( + written_files, +@@ -2774,7 +2774,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + host=host) + written_files = host.filesystem.written_files + self.assertEqual(details.exit_code, 0) +- self.assertEqual(len(written_files.keys()), 8) ++ self.assertEqual(len(list(written_files.keys())), 8) + # We should reset the platform image baseline. + self.assert_baselines( + written_files, +@@ -2810,7 +2810,7 @@ class RebaselineTest(unittest.TestCase, StreamTestingMixin): + self.assertEqual(details.exit_code, 0) + self.assertFalse(host.filesystem.exists(virtual_baseline_txt)) + written_files = host.filesystem.written_files +- self.assertEqual(len(written_files.keys()), 8) ++ self.assertEqual(len(list(written_files.keys())), 8) + self.assert_wpt_manifests_not_written(host, written_files) + # We should create new image baseline only. + self.assert_baselines( +@@ -2837,7 +2837,7 @@ class MainTest(unittest.TestCase): + def exception_raising_run(port, options, args, printer): + assert False + +- stderr = StringIO.StringIO() ++ stderr = io.StringIO() + try: + run_web_tests.run = interrupting_run + res = run_web_tests.main([], stderr) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/servers/apache_http.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/servers/apache_http.py +index 8a253dbe4..1cdde246b 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/servers/apache_http.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/servers/apache_http.py +@@ -173,7 +173,7 @@ class ApacheHTTP(server_base.ServerBase): + + if additional_dirs: + self._start_cmd = start_cmd +- for alias, path in additional_dirs.iteritems(): ++ for alias, path in additional_dirs.items(): + start_cmd += [ + '-c', + 'Alias %s "%s"' % (alias, path), +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/servers/cli_wrapper.py 
b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/servers/cli_wrapper.py +index e37580c11..8adee49d8 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/servers/cli_wrapper.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/servers/cli_wrapper.py +@@ -102,7 +102,7 @@ def main(server_constructor, + server = server_constructor(port_obj, options.output_dir, **kwargs) + server.start() + +- print 'Press Ctrl-C or `kill {}` to stop the server'.format(os.getpid()) ++ print('Press Ctrl-C or `kill {}` to stop the server'.format(os.getpid())) + try: + while True: + sleep_fn() +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/try_flag.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/try_flag.py +index 07e0255fe..42a51d8bb 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/try_flag.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/try_flag.py +@@ -65,7 +65,7 @@ class TryFlag(object): + test_expectations.parse_tagged_list(content) + return { + test_name +- for test_name in test_expectations.individual_exps.keys() ++ for test_name in list(test_expectations.individual_exps.keys()) + } + + def trigger(self): +@@ -103,7 +103,7 @@ class TryFlag(object): + self._host.print_('Fetching results...') + # TODO: Get jobs from the _tryflag branch. Current branch for now. 
+ jobs = self._git_cl.latest_try_jobs( +- builder_names=BUILDER_CONFIGS.keys()) ++ builder_names=list(BUILDER_CONFIGS.keys())) + results_fetcher = self._host.results_fetcher + for build in sorted(jobs): + self._host.print_('-- %s: %s/results.html' % +@@ -140,7 +140,7 @@ class TryFlag(object): + elif action == 'update': + self.update() + else: +- print >> self._host.stderr, 'specify "trigger" or "update"' ++ print('specify "trigger" or "update"', file=self._host.stderr) + return 1 + return 0 + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/update_expectations.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/update_expectations.py +index e4bf80a34..c26238cbe 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/update_expectations.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/update_expectations.py +@@ -167,7 +167,7 @@ class ExpectationsRemover(object): + # Initialize OS version to OS dictionary. + if not self._version_to_os: + for os, os_versions in \ +- self._port.configuration_specifier_macros().items(): ++ list(self._port.configuration_specifier_macros().items()): + for version in os_versions: + self._version_to_os[version.lower()] = os.lower() + +@@ -203,7 +203,7 @@ class ExpectationsRemover(object): + + builders_checked.append(builder_name) + +- if builder_name not in self._builder_results_by_path.keys(): ++ if builder_name not in list(self._builder_results_by_path.keys()): + _log.error('Failed to find results for builder "%s"', + builder_name) + return False +@@ -211,7 +211,7 @@ class ExpectationsRemover(object): + results_by_path = self._builder_results_by_path[builder_name] + + # No results means the tests were all skipped, or all results are passing. 
+- if expectation.test not in results_by_path.keys(): ++ if expectation.test not in list(results_by_path.keys()): + if self._remove_missing: + continue + return False +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/update_expectations_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/update_expectations_unittest.py +index 1ab0efd73..99167a3b3 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/update_expectations_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/update_expectations_unittest.py +@@ -22,7 +22,7 @@ class FakeBotTestExpectations(object): + self._results = {} + + # Make the results distinct like the real BotTestExpectations. +- for path, results in results_by_path.iteritems(): ++ for path, results in results_by_path.items(): + self._results[path] = list(set(results)) + + def all_results_by_path(self): +@@ -171,7 +171,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + } + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals(updated_expectations, test_expectations_before) ++ self.assertEqual(updated_expectations, test_expectations_before) + + def test_fail_mode_doesnt_remove_non_fails(self): + """Tests that lines that aren't failing are not touched. 
+@@ -211,7 +211,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self.FAIL_TYPE)) + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals(updated_expectations, test_expectations_before) ++ self.assertEqual(updated_expectations, test_expectations_before) + + def test_dont_remove_directory_flake(self): + """Tests that flake lines with directories are untouched.""" +@@ -244,7 +244,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self.FLAKE_TYPE)) + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals(updated_expectations, test_expectations_before) ++ self.assertEqual(updated_expectations, test_expectations_before) + + def test_dont_remove_directory_fail(self): + """Tests that fail lines with directories are untouched.""" +@@ -277,7 +277,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self.FAIL_TYPE)) + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals(updated_expectations, test_expectations_before) ++ self.assertEqual(updated_expectations, test_expectations_before) + + def test_dont_remove_skip(self): + """Tests that lines with Skip are untouched. 
+@@ -312,7 +312,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._expectations_remover = self._create_expectations_remover() + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals(updated_expectations, test_expectations_before) ++ self.assertEqual(updated_expectations, test_expectations_before) + + def test_all_failure_result_types(self): + """Tests that all failure types are treated as failure.""" +@@ -350,7 +350,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._expectations_remover = self._create_expectations_remover() + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals( ++ self.assertEqual( + updated_expectations, + _strip_multiline_string_spaces("""# results: [ Failure Pass ] + test/a.html [ Failure Pass ] +@@ -394,7 +394,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + # The line with test/d.html is not removed since + # --remove-missing is false by default; lines for + # tests with no actual results are kept. +- self.assertEquals( ++ self.assertEqual( + updated_expectations, + _strip_multiline_string_spaces( + """# results: [ Timeout Crash Failure ] +@@ -441,7 +441,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._expectations_remover = self._create_expectations_remover() + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals( ++ self.assertEqual( + updated_expectations, + _strip_multiline_string_spaces( + """# results: [ Failure Pass Crash Timeout ] +@@ -474,7 +474,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self.FLAKE_TYPE)) + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals( ++ self.assertEqual( + updated_expectations, + _strip_multiline_string_spaces("""# results: [ Failure Pass ] + # Keep since it's all failures. 
+@@ -514,7 +514,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._expectations_remover = self._create_expectations_remover() + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals(updated_expectations, ++ self.assertEqual(updated_expectations, + ('# results: [ Failure Pass ]')) + + def test_empty_test_expectations(self): +@@ -539,7 +539,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._expectations_remover = self._create_expectations_remover() + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals(updated_expectations, '') ++ self.assertEqual(updated_expectations, '') + + def test_basic_multiple_builders(self): + """Tests basic functionality with multiple builders.""" +@@ -591,7 +591,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._expectations_remover = self._create_expectations_remover() + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals( ++ self.assertEqual( + updated_expectations, + _strip_multiline_string_spaces("""# results: [ Failure Pass ] + # Keep these two since they're failing on the Mac builder. +@@ -695,7 +695,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._expectations_remover = self._create_expectations_remover() + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals( ++ self.assertEqual( + updated_expectations, + _strip_multiline_string_spaces(""" + # tags: [ Linux Mac Win Mac ] +@@ -804,7 +804,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._expectations_remover = self._create_expectations_remover() + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals( ++ self.assertEqual( + updated_expectations, + _strip_multiline_string_spaces( + """# Keep these two since they fail in debug. 
+@@ -859,7 +859,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._expectations_remover = self._create_expectations_remover() + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals(updated_expectations, ++ self.assertEqual(updated_expectations, + (_strip_multiline_string_spaces(""" + # results: [ Failure Pass ] + # Comment A - Keep since these aren't part of any test. +@@ -909,7 +909,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._expectations_remover = self._create_expectations_remover() + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals(updated_expectations, test_expectations_before) ++ self.assertEqual(updated_expectations, test_expectations_before) + + def test_lines_with_no_results_on_builders_can_be_removed(self): + """Tests that we remove a line that has no results on the builders. +@@ -945,7 +945,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + remove_missing=True) + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals( ++ self.assertEqual( + updated_expectations, + _strip_multiline_string_spaces(""" + # results: [ Failure Timeout Pass Crash Skip ] +@@ -984,7 +984,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._expectations_remover = self._create_expectations_remover() + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals( ++ self.assertEqual( + updated_expectations, + _strip_multiline_string_spaces(""" + # results: [ Failure Pass ]""")) +@@ -993,7 +993,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._create_expectations_remover(include_cq_results=True)) + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals(updated_expectations, test_expectations_before) ++ self.assertEqual(updated_expectations, 
test_expectations_before) + + def test_missing_builders_for_some_configurations(self): + """Tests the behavior when there are no builders for some configurations. +@@ -1066,7 +1066,7 @@ class UpdateTestExpectationsTest(LoggingTestCase): + self._expectations_remover = self._create_expectations_remover() + updated_expectations = ( + self._expectations_remover.get_updated_test_expectations()) +- self.assertEquals( ++ self.assertEqual( + updated_expectations, + _strip_multiline_string_spaces(""" + # tags: [ Win Linux ] +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/metered_stream.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/metered_stream.py +index ed834fb4e..82920d48c 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/metered_stream.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/metered_stream.py +@@ -113,7 +113,7 @@ class MeteredStream(object): + + # This is the easiest way to make sure a byte stream is printable as ascii + # with all non-ascii characters replaced. +- uni_msg = msg if isinstance(msg, unicode) else msg.decode( ++ uni_msg = msg if isinstance(msg, str) else msg.decode( + 'ascii', errors='replace') + self._stream.write(uni_msg.encode('ascii', errors='replace')) + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/metered_stream_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/metered_stream_unittest.py +index 3e8290b68..c3b66c7ba 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/metered_stream_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/metered_stream_unittest.py +@@ -26,7 +26,7 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +-import StringIO ++import io + import logging + import re + import unittest +@@ -39,7 +39,7 @@ class RegularTest(unittest.TestCase): + isatty = False + + def setUp(self): +- self.stream = StringIO.StringIO() ++ self.stream = io.StringIO() + self.buflist = self.stream.buflist + self.stream.isatty = lambda: self.isatty + +@@ -49,7 +49,7 @@ class RegularTest(unittest.TestCase): + self.logger.propagate = False + + # add a dummy time counter for a default behavior. +- self.times = range(10) ++ self.times = list(range(10)) + + self.meter = MeteredStream(self.stream, self.verbose, self.logger, + self.time_fn, 8675) +@@ -65,7 +65,7 @@ class RegularTest(unittest.TestCase): + def test_logging_not_included(self): + # This tests that if we don't hand a logger to the MeteredStream, + # nothing is logged. +- logging_stream = StringIO.StringIO() ++ logging_stream = io.StringIO() + handler = logging.StreamHandler(logging_stream) + root_logger = logging.getLogger() + orig_level = root_logger.level +@@ -130,7 +130,7 @@ class TtyTest(RegularTest): + + def test_bytestream(self): + self.meter.write('German umlauts: \xe4\xf6\xfc') +- self.meter.write(u'German umlauts: \xe4\xf6\xfc') ++ self.meter.write('German umlauts: \xe4\xf6\xfc') + self.assertEqual(self.buflist, + ['German umlauts: ???', 'German umlauts: ???']) + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/printing.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/printing.py +index c073c5839..9c02e89bb 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/printing.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/printing.py +@@ -185,7 +185,7 @@ class Printer(object): + self._print_debug('Thread timing:') + stats = {} + cuml_time = 0 +- for result in run_results.results_by_name.values(): ++ for result in list(run_results.results_by_name.values()): + stats.setdefault(result.worker_name, { + 'num_tests': 0, 
+ 'total_time': 0 +@@ -206,7 +206,7 @@ class Printer(object): + if self._options.timing: + parallel_time = sum( + result.total_run_time +- for result in run_results.results_by_name.values()) ++ for result in list(run_results.results_by_name.values())) + + # There is serial overhead in web_test_runner.run() that we can't easily account for when + # really running in parallel, but taking the min() ensures that in the worst case +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/printing_unittest.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/printing_unittest.py +index c284d9ff8..f67351ebb 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/printing_unittest.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/views/printing_unittest.py +@@ -28,7 +28,7 @@ + """Unit tests for printing.py.""" + + import optparse +-import StringIO ++import io + import sys + import unittest + +@@ -96,7 +96,7 @@ class Testprinter(unittest.TestCase): + host = MockHost() + self._port = host.port_factory.get('test', options) + +- regular_output = StringIO.StringIO() ++ regular_output = io.StringIO() + printer = printing.Printer(host, options, regular_output) + return printer, regular_output + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/web_tests_history.py b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/web_tests_history.py +index 4cd3a4db6..398e00e3d 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/web_tests_history.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/blinkpy/web_tests/web_tests_history.py +@@ -97,7 +97,7 @@ class HistoryChecker(object): + + def _process_single(self, path): + _init(self) +- print _run(path) ++ print(_run(path)) + return 0 + + def _process_many(self, paths): +@@ -126,7 +126,7 @@ class HistoryChecker(object): + if isinstance(res, BaseException): + # Traceback is 
already printed in the worker; exit directly. + raise SystemExit +- print res ++ print(res) + pool.close() + except Exception: + # A user exception was raised from the manager (main) process. +diff --git a/src/3rdparty/chromium/third_party/blink/tools/extract_expectation_names.py b/src/3rdparty/chromium/third_party/blink/tools/extract_expectation_names.py +index 6268ec5eb..5de4a3b91 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/extract_expectation_names.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/extract_expectation_names.py +@@ -26,4 +26,4 @@ with open(filename) as f: + parser = TaggedTestListParser(f.read()) + for test_expectation in parser.expectations: + if test_expectation.test: +- print test_expectation.test ++ print(test_expectation.test) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/gdb/blink.py b/src/3rdparty/chromium/third_party/blink/tools/gdb/blink.py +index d86c6d4cb..c15e06ee1 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/gdb/blink.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/gdb/blink.py +@@ -34,7 +34,7 @@ Add this to your gdb by amending your ~/.gdbinit as follows: + import blink + """ + +-from __future__ import print_function ++ + + import gdb + import re +@@ -338,7 +338,7 @@ class WTFVectorPrinter: + return ('[%d]' % count, element) + + # Python version < 3 compatibility: +- def next(self): ++ def __next__(self): + return self.__next__() + + def __init__(self, val): +diff --git a/src/3rdparty/chromium/third_party/blink/tools/lldb/lldb_blink.py b/src/3rdparty/chromium/third_party/blink/tools/lldb/lldb_blink.py +index 438ec6fa5..d86ef6a3d 100644 +--- a/src/3rdparty/chromium/third_party/blink/tools/lldb/lldb_blink.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/lldb/lldb_blink.py +@@ -127,7 +127,7 @@ def guess_string_length(valobj, error): + if not valobj.GetValue(): + return 0 + +- for i in xrange(0, 2048): ++ for i in range(0, 2048): + if valobj.GetPointeeData(i, 
1).GetUnsignedInt16(error, 0) == 0: + return i + +@@ -140,10 +140,10 @@ def ustring_to_string(valobj, error, length=None): + else: + length = int(length) + +- out_string = u"" +- for i in xrange(0, length): ++ out_string = "" ++ for i in range(0, length): + char_value = valobj.GetPointeeData(i, 1).GetUnsignedInt16(error, 0) +- out_string = out_string + unichr(char_value) ++ out_string = out_string + chr(char_value) + + return out_string.encode('utf-8') + +@@ -154,10 +154,10 @@ def lstring_to_string(valobj, error, length=None): + else: + length = int(length) + +- out_string = u"" +- for i in xrange(0, length): ++ out_string = "" ++ for i in range(0, length): + char_value = valobj.GetPointeeData(i, 1).GetUnsignedInt8(error, 0) +- out_string = out_string + unichr(char_value) ++ out_string = out_string + chr(char_value) + + return out_string.encode('utf-8') + +@@ -218,7 +218,7 @@ class WTFStringProvider: + def to_string(self): + impl = self.stringimpl() + if not impl: +- return u"" ++ return "" + return impl.to_string() + + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/plan_blink_move.py b/src/3rdparty/chromium/third_party/blink/tools/plan_blink_move.py +index 3de4469d3..7a95a9f6a 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/plan_blink_move.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/plan_blink_move.py +@@ -85,10 +85,10 @@ def plan_blink_move(fs, prefixes): + def main(): + fs = FileSystem() + file_pairs = plan_blink_move(fs, sys.argv[1:]) +- print 'Show renaming plan. It contains files not in the repository.' +- print ' => ' ++ print('Show renaming plan. 
It contains files not in the repository.') ++ print(' => ') + for pair in file_pairs: +- print '%s\t=>\t%s' % pair ++ print('%s\t=>\t%s' % pair) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/third_party/blink/tools/print_stale_test_expectations_entries.py b/src/3rdparty/chromium/third_party/blink/tools/print_stale_test_expectations_entries.py +index 9870a4a82..39d6e5380 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/print_stale_test_expectations_entries.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/print_stale_test_expectations_entries.py +@@ -33,9 +33,9 @@ import csv + import datetime + import json + import optparse +-import StringIO ++import io + import sys +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + + from blinkpy.common.host import Host + from blinkpy.web_tests.models.test_expectations import TestExpectationParser +@@ -69,7 +69,7 @@ class StaleTestPrinter(object): + port = self.host.port_factory.get() + expectations = port.expectations_dict() + parser = TestExpectationParser(port, all_tests=(), is_lint_mode=False) +- expectations_file, expectations_contents = expectations.items()[0] ++ expectations_file, expectations_contents = list(expectations.items())[0] + expectation_lines = parser.parse(expectations_file, + expectations_contents) + csv_rows = [] +@@ -81,7 +81,7 @@ class StaleTestPrinter(object): + self.write_csv(csv_rows) + + def write_csv(self, rows): +- out = StringIO.StringIO() ++ out = io.StringIO() + writer = csv.writer(out) + writer.writerow(CSV_ROW_HEADERS) + for row in rows: +@@ -105,22 +105,22 @@ class StaleTestPrinter(object): + self.populate_bug_info(bug_link, test_name) + # Return the stale bug's information. 
+ if all(self.is_stale(bug_link) for bug_link in bug_links): +- print line.original_string.strip() ++ print(line.original_string.strip()) + return [ + bug_links[0], self.bug_info[bug_links[0]].filename, + self.bug_info[bug_links[0]].days_since_last_update, + self.bug_info[bug_links[0]].owner, + self.bug_info[bug_links[0]].status + ] +- except urllib2.HTTPError as error: ++ except urllib.error.HTTPError as error: + if error.code == 404: + message = 'got 404, bug does not exist.' + elif error.code == 403: + message = 'got 403, not accessible. Not able to tell if it\'s stale.' + else: + message = str(error) +- print >> sys.stderr, 'Error when checking %s: %s' % ( +- ','.join(bug_links), message) ++ print('Error when checking %s: %s' % ( ++ ','.join(bug_links), message), file=sys.stderr) + return None + + def populate_bug_info(self, bug_link, test_name): +@@ -129,13 +129,13 @@ class StaleTestPrinter(object): + # In case there's an error in the request, don't make the same request again. + bug_number = bug_link.strip(CRBUG_PREFIX) + url = GOOGLE_CODE_URL % bug_number +- response = urllib2.urlopen(url) ++ response = urllib.request.urlopen(url) + parsed = json.loads(response.read()) + parsed_time = datetime.datetime.strptime( + parsed['updated'].split(".")[0] + "UTC", "%Y-%m-%dT%H:%M:%S%Z") + time_delta = datetime.datetime.now() - parsed_time + owner = 'none' +- if 'owner' in parsed.keys(): ++ if 'owner' in list(parsed.keys()): + owner = parsed['owner']['name'] + self.bug_info[bug_link] = BugInfo(bug_link, test_name, time_delta.days, + owner, parsed['state']) +diff --git a/src/3rdparty/chromium/third_party/blink/tools/print_web_test_json_results.py b/src/3rdparty/chromium/third_party/blink/tools/print_web_test_json_results.py +index 6386fcb46..566f2fdcf 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/print_web_test_json_results.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/print_web_test_json_results.py +@@ -35,7 +35,7 @@ def main(argv): + with 
open(args[0], 'r') as fp: + txt = fp.read() + else: +- print >> sys.stderr, "file not found: %s" % args[0] ++ print("file not found: %s" % args[0], file=sys.stderr) + sys.exit(1) + else: + txt = host.filesystem.read_text_file( +@@ -51,12 +51,12 @@ def main(argv): + + tests_to_print = [] + if options.passes: +- tests_to_print += passes.keys() ++ tests_to_print += list(passes.keys()) + if options.failures: +- tests_to_print += failures.keys() ++ tests_to_print += list(failures.keys()) + if options.flakes: +- tests_to_print += flakes.keys() +- print "\n".join(sorted(tests_to_print)) ++ tests_to_print += list(flakes.keys()) ++ print("\n".join(sorted(tests_to_print))) + + if options.ignored_failures_path: + with open(options.ignored_failures_path, 'r') as fp: +@@ -67,12 +67,12 @@ def main(argv): + _, ignored_failures, _ = decode_results(results, options.expected) + new_failures = set(failures.keys()) - set(ignored_failures.keys()) + if new_failures: +- print "New failures:" +- print "\n".join(sorted(new_failures)) +- print ++ print("New failures:") ++ print("\n".join(sorted(new_failures))) ++ print() + if ignored_failures: +- print "Ignored failures:" +- print "\n".join(sorted(ignored_failures.keys())) ++ print("Ignored failures:") ++ print("\n".join(sorted(ignored_failures.keys()))) + if new_failures: + return 1 + return 0 +@@ -83,7 +83,7 @@ def decode_results(results, include_expected=False): + failures = {} + flakes = {} + passes = {} +- for (test, result) in tests.iteritems(): ++ for (test, result) in tests.items(): + if include_expected or result.get('is_unexpected'): + actual_results = result['actual'].split() + expected_results = result['expected'].split() +@@ -106,7 +106,7 @@ def convert_trie_to_flat_paths(trie, prefix=None): + # Cloned from blinkpy.web_tests.layout_package.json_results_generator + # so that this code can stand alone. 
+ result = {} +- for name, data in trie.iteritems(): ++ for name, data in trie.items(): + if prefix: + name = prefix + "/" + name + +diff --git a/src/3rdparty/chromium/third_party/blink/tools/print_web_test_ordering.py b/src/3rdparty/chromium/third_party/blink/tools/print_web_test_ordering.py +index e6dd1b5e5..70fefa935 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/print_web_test_ordering.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/print_web_test_ordering.py +@@ -52,7 +52,7 @@ def main(argv): + + stats = convert_trie_to_flat_paths(stats_trie) + stats_by_worker = {} +- for test_name, data in stats.items(): ++ for test_name, data in list(stats.items()): + worker = "worker/" + str(data["results"][0]) + if worker not in stats_by_worker: + stats_by_worker[worker] = [] +@@ -63,18 +63,18 @@ def main(argv): + }) + + for worker in sorted(stats_by_worker.keys()): +- print worker + ':' ++ print(worker + ':') + for test in sorted( + stats_by_worker[worker], key=lambda test: test["number"]): +- print test["name"] +- print ++ print(test["name"]) ++ print() + + + def convert_trie_to_flat_paths(trie, prefix=None): + # Cloned from blinkpy.web_tests.layout_package.json_results_generator + # so that this code can stand alone. + result = {} +- for name, data in trie.iteritems(): ++ for name, data in trie.items(): + if prefix: + name = prefix + "/" + name + if "results" in data: +diff --git a/src/3rdparty/chromium/third_party/blink/tools/read_checksum_from_png.py b/src/3rdparty/chromium/third_party/blink/tools/read_checksum_from_png.py +index fe9726100..eb5db52ab 100755 +--- a/src/3rdparty/chromium/third_party/blink/tools/read_checksum_from_png.py ++++ b/src/3rdparty/chromium/third_party/blink/tools/read_checksum_from_png.py +@@ -27,7 +27,7 @@ + # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +-from __future__ import with_statement ++ + import sys + + from blinkpy.common import read_checksum_from_png +@@ -35,5 +35,5 @@ from blinkpy.common import read_checksum_from_png + if '__main__' == __name__: + for filename in sys.argv[1:]: + with open(filename, 'r') as filehandle: +- print "%s: %s" % (read_checksum_from_png.read_checksum(filehandle), +- filename) ++ print("%s: %s" % (read_checksum_from_png.read_checksum(filehandle), ++ filename)) +diff --git a/src/3rdparty/chromium/third_party/boringssl/roll_boringssl.py b/src/3rdparty/chromium/third_party/boringssl/roll_boringssl.py +index 8918a3a85..bed60ef47 100755 +--- a/src/3rdparty/chromium/third_party/boringssl/roll_boringssl.py ++++ b/src/3rdparty/chromium/third_party/boringssl/roll_boringssl.py +@@ -100,10 +100,10 @@ def main(): + return 1 + + if not IsPristine(SRC_PATH): +- print >>sys.stderr, 'Chromium checkout not pristine.' ++ print('Chromium checkout not pristine.', file=sys.stderr) + return 0 + if not IsPristine(BORINGSSL_SRC_PATH): +- print >>sys.stderr, 'BoringSSL checkout not pristine.' ++ print('BoringSSL checkout not pristine.', file=sys.stderr) + return 0 + + if len(sys.argv) > 1: +@@ -114,10 +114,10 @@ def main(): + + old_head = RevParse(BORINGSSL_SRC_PATH, 'HEAD') + if old_head == new_head: +- print 'BoringSSL already up to date.' ++ print('BoringSSL already up to date.') + return 0 + +- print 'Rolling BoringSSL from %s to %s...' % (old_head, new_head) ++ print('Rolling BoringSSL from %s to %s...' % (old_head, new_head)) + + # Look for commits with associated Chromium bugs. 
+ crbugs = set() +@@ -199,8 +199,8 @@ https://boringssl.googlesource.com/boringssl/+log/%s..%s + ['git', 'log', '--grep', '^Update-Note:', '-i', + '%s..%s' % (old_head, new_head)], cwd=BORINGSSL_SRC_PATH).strip() + if len(notes) > 0: +- print "\x1b[1mThe following changes contain updating notes\x1b[0m:\n\n" +- print notes ++ print("\x1b[1mThe following changes contain updating notes\x1b[0m:\n\n") ++ print(notes) + + return 0 + +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/crypto/curve25519/make_curve25519_tables.py b/src/3rdparty/chromium/third_party/boringssl/src/crypto/curve25519/make_curve25519_tables.py +index 50dee2a98..028c6b6b2 100755 +--- a/src/3rdparty/chromium/third_party/boringssl/src/crypto/curve25519/make_curve25519_tables.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/crypto/curve25519/make_curve25519_tables.py +@@ -14,7 +14,7 @@ + # OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +-import StringIO ++import io + import subprocess + + # Base field Z_p +@@ -140,7 +140,7 @@ def main(): + bi_precomp.append(to_ge_precomp(P)) + + +- buf = StringIO.StringIO() ++ buf = io.StringIO() + buf.write("""/* Copyright (c) 2020, Google Inc. 
+ * + * Permission to use, copy, modify, and/or distribute this software for any +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/gen_gtest_pred_impl.py b/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/gen_gtest_pred_impl.py +index b43efdf41..1c5fea776 100755 +--- a/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/gen_gtest_pred_impl.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/gen_gtest_pred_impl.py +@@ -182,7 +182,7 @@ def Title(word): + def OneTo(n): + """Returns the list [1, 2, 3, ..., n].""" + +- return range(1, n + 1) ++ return list(range(1, n + 1)) + + + def Iter(n, format, sep=''): +@@ -304,12 +304,12 @@ def GenerateFile(path, content): + """Given a file path and a content string + overwrites it with the given content. + """ +- print 'Updating file %s . . .' % path ++ print('Updating file %s . . .' % path) + f = file(path, 'w+') +- print >>f, content, ++ print(content, end=' ', file=f) + f.close() + +- print 'File %s has been updated.' % path ++ print('File %s has been updated.' 
% path) + + + def GenerateHeader(n): +@@ -717,8 +717,8 @@ def _Main(): + unit test.""" + + if len(sys.argv) != 2: +- print __doc__ +- print 'Author: ' + __author__ ++ print(__doc__) ++ print('Author: ' + __author__) + sys.exit(1) + + n = int(sys.argv[1]) +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/pump.py b/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/pump.py +index 5efb653c2..3f98065d8 100755 +--- a/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/pump.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/pump.py +@@ -246,7 +246,7 @@ def ParseToken(lines, pos, regex, token_type): + if m and not m.start(): + return MakeToken(lines, pos, pos + m.end(), token_type) + else: +- print 'ERROR: %s expected at %s.' % (token_type, pos) ++ print('ERROR: %s expected at %s.' % (token_type, pos)) + sys.exit(1) + + +@@ -273,7 +273,7 @@ def SkipUntil(lines, pos, regex, token_type): + if m: + return pos + m.start() + else: +- print ('ERROR: %s expected on line %s after column %s.' % ++ print('ERROR: %s expected on line %s after column %s.' % + (token_type, pos.line + 1, pos.column)) + sys.exit(1) + +@@ -453,8 +453,8 @@ def PushFront(a_list, elem): + def PopToken(a_list, token_type=None): + token = PopFront(a_list) + if token_type is not None and token.token_type != token_type: +- print 'ERROR: %s expected at %s' % (token_type, token.start) +- print 'ERROR: %s found instead' % (token,) ++ print('ERROR: %s expected at %s' % (token_type, token.start)) ++ print('ERROR: %s found instead' % (token,)) + sys.exit(1) + + return token +@@ -616,15 +616,15 @@ class Env: + if identifier == var: + return value + +- print 'ERROR: meta variable %s is undefined.' % (identifier,) ++ print('ERROR: meta variable %s is undefined.' 
% (identifier,)) + sys.exit(1) + + def EvalExp(self, exp): + try: + result = eval(exp.python_exp) +- except Exception, e: +- print 'ERROR: caught exception %s: %s' % (e.__class__.__name__, e) +- print ('ERROR: failed to evaluate meta expression %s at %s' % ++ except Exception as e: ++ print('ERROR: caught exception %s: %s' % (e.__class__.__name__, e)) ++ print('ERROR: failed to evaluate meta expression %s at %s' % + (exp.python_exp, exp.token.start)) + sys.exit(1) + return result +@@ -634,7 +634,7 @@ class Env: + if identifier == var: + return (lower, upper) + +- print 'ERROR: range %s is undefined.' % (identifier,) ++ print('ERROR: range %s is undefined.' % (identifier,)) + sys.exit(1) + + +@@ -694,8 +694,8 @@ def RunAtomicCode(env, node, output): + elif isinstance(node, CodeNode): + RunCode(env.Clone(), node, output) + else: +- print 'BAD' +- print node ++ print('BAD') ++ print(node) + sys.exit(1) + + +@@ -830,7 +830,7 @@ def ConvertFromPumpSource(src_text): + + def main(argv): + if len(argv) == 1: +- print __doc__ ++ print(__doc__) + sys.exit(1) + + file_path = argv[-1] +@@ -840,7 +840,7 @@ def main(argv): + else: + output_file_path = '-' + if output_file_path == '-': +- print output_str, ++ print(output_str, end=' ') + else: + output_file = file(output_file_path, 'w') + output_file.write('// This file was GENERATED by command:\n') +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/release_docs.py b/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/release_docs.py +index 1291347f6..06ba0ffec 100755 +--- a/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/release_docs.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/release_docs.py +@@ -127,11 +127,11 @@ class WikiBrancher(object): + def BranchFiles(self): + """Branches the .wiki files needed to be branched.""" + +- print 'Branching %d .wiki files:' % 
(len(self.files_to_branch),) ++ print('Branching %d .wiki files:' % (len(self.files_to_branch),)) + os.chdir(self.wiki_dir) + for f in self.files_to_branch: + command = 'svn cp %s %s%s' % (f, self.version_prefix, f) +- print command ++ print(command) + os.system(command) + + def UpdateLinksInBranchedFiles(self): +@@ -139,7 +139,7 @@ class WikiBrancher(object): + for f in self.files_to_branch: + source_file = os.path.join(self.wiki_dir, f) + versioned_file = os.path.join(self.wiki_dir, self.version_prefix + f) +- print 'Updating links in %s.' % (versioned_file,) ++ print('Updating links in %s.' % (versioned_file,)) + text = file(source_file, 'r').read() + new_text = self.search_for_re.sub(self.replace_with, text) + file(versioned_file, 'w').write(new_text) +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/upload.py b/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/upload.py +index c852e4c91..981a692bc 100755 +--- a/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/upload.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/scripts/upload.py +@@ -31,7 +31,7 @@ against by using the '--rev' option. + # This code is derived from appcfg.py in the App Engine SDK (open source), + # and from ASPN recipe #146306. 
+ +-import cookielib ++import http.cookiejar + import getpass + import logging + import md5 +@@ -42,9 +42,9 @@ import re + import socket + import subprocess + import sys +-import urllib +-import urllib2 +-import urlparse ++import urllib.request, urllib.parse, urllib.error ++import urllib.request, urllib.error, urllib.parse ++import urllib.parse + + try: + import readline +@@ -79,15 +79,15 @@ def GetEmail(prompt): + last_email = last_email_file.readline().strip("\n") + last_email_file.close() + prompt += " [%s]" % last_email +- except IOError, e: ++ except IOError as e: + pass +- email = raw_input(prompt + ": ").strip() ++ email = input(prompt + ": ").strip() + if email: + try: + last_email_file = open(last_email_file_name, "w") + last_email_file.write(email) + last_email_file.close() +- except IOError, e: ++ except IOError as e: + pass + else: + email = last_email +@@ -103,20 +103,20 @@ def StatusUpdate(msg): + msg: The string to print. + """ + if verbosity > 0: +- print msg ++ print(msg) + + + def ErrorExit(msg): + """Print an error message to stderr and exit.""" +- print >>sys.stderr, msg ++ print(msg, file=sys.stderr) + sys.exit(1) + + +-class ClientLoginError(urllib2.HTTPError): ++class ClientLoginError(urllib.error.HTTPError): + """Raised to indicate there was an error authenticating with ClientLogin.""" + + def __init__(self, url, code, msg, headers, args): +- urllib2.HTTPError.__init__(self, url, code, msg, headers, None) ++ urllib.error.HTTPError.__init__(self, url, code, msg, headers, None) + self.args = args + self.reason = args["Error"] + +@@ -162,10 +162,10 @@ class AbstractRpcServer(object): + def _CreateRequest(self, url, data=None): + """Creates a new urllib request.""" + logging.debug("Creating request for: '%s' with payload:\n%s", url, data) +- req = urllib2.Request(url, data=data) ++ req = urllib.request.Request(url, data=data) + if self.host_override: + req.add_header("Host", self.host_override) +- for key, value in 
self.extra_headers.iteritems(): ++ for key, value in self.extra_headers.items(): + req.add_header(key, value) + return req + +@@ -189,7 +189,7 @@ class AbstractRpcServer(object): + account_type = "HOSTED" + req = self._CreateRequest( + url="https://www.google.com/accounts/ClientLogin", +- data=urllib.urlencode({ ++ data=urllib.parse.urlencode({ + "Email": email, + "Passwd": password, + "service": "ah", +@@ -203,7 +203,7 @@ class AbstractRpcServer(object): + response_dict = dict(x.split("=") + for x in response_body.split("\n") if x) + return response_dict["Auth"] +- except urllib2.HTTPError, e: ++ except urllib.error.HTTPError as e: + if e.code == 403: + body = e.read() + response_dict = dict(x.split("=", 1) for x in body.split("\n") if x) +@@ -225,14 +225,14 @@ class AbstractRpcServer(object): + continue_location = "http://localhost/" + args = {"continue": continue_location, "auth": auth_token} + req = self._CreateRequest("http://%s/_ah/login?%s" % +- (self.host, urllib.urlencode(args))) ++ (self.host, urllib.parse.urlencode(args))) + try: + response = self.opener.open(req) +- except urllib2.HTTPError, e: ++ except urllib.error.HTTPError as e: + response = e + if (response.code != 302 or + response.info()["location"] != continue_location): +- raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, ++ raise urllib.error.HTTPError(req.get_full_url(), response.code, response.msg, + response.headers, response.fp) + self.authenticated = True + +@@ -255,34 +255,34 @@ class AbstractRpcServer(object): + credentials = self.auth_function() + try: + auth_token = self._GetAuthToken(credentials[0], credentials[1]) +- except ClientLoginError, e: ++ except ClientLoginError as e: + if e.reason == "BadAuthentication": +- print >>sys.stderr, "Invalid username or password." 
++ print("Invalid username or password.", file=sys.stderr) + continue + if e.reason == "CaptchaRequired": +- print >>sys.stderr, ( ++ print(( + "Please go to\n" + "https://www.google.com/accounts/DisplayUnlockCaptcha\n" +- "and verify you are a human. Then try again.") ++ "and verify you are a human. Then try again."), file=sys.stderr) + break + if e.reason == "NotVerified": +- print >>sys.stderr, "Account not verified." ++ print("Account not verified.", file=sys.stderr) + break + if e.reason == "TermsNotAgreed": +- print >>sys.stderr, "User has not agreed to TOS." ++ print("User has not agreed to TOS.", file=sys.stderr) + break + if e.reason == "AccountDeleted": +- print >>sys.stderr, "The user account has been deleted." ++ print("The user account has been deleted.", file=sys.stderr) + break + if e.reason == "AccountDisabled": +- print >>sys.stderr, "The user account has been disabled." ++ print("The user account has been disabled.", file=sys.stderr) + break + if e.reason == "ServiceDisabled": +- print >>sys.stderr, ("The user's access to the service has been " +- "disabled.") ++ print(("The user's access to the service has been " ++ "disabled."), file=sys.stderr) + break + if e.reason == "ServiceUnavailable": +- print >>sys.stderr, "The service is not available; try again later." ++ print("The service is not available; try again later.", file=sys.stderr) + break + raise + self._GetAuthCookie(auth_token) +@@ -319,7 +319,7 @@ class AbstractRpcServer(object): + args = dict(kwargs) + url = "http://%s%s" % (self.host, request_path) + if args: +- url += "?" + urllib.urlencode(args) ++ url += "?" 
+ urllib.parse.urlencode(args) + req = self._CreateRequest(url=url, data=payload) + req.add_header("Content-Type", content_type) + try: +@@ -327,7 +327,7 @@ class AbstractRpcServer(object): + response = f.read() + f.close() + return response +- except urllib2.HTTPError, e: ++ except urllib.error.HTTPError as e: + if tries > 3: + raise + elif e.code == 401: +@@ -357,35 +357,35 @@ class HttpRpcServer(AbstractRpcServer): + Returns: + A urllib2.OpenerDirector object. + """ +- opener = urllib2.OpenerDirector() +- opener.add_handler(urllib2.ProxyHandler()) +- opener.add_handler(urllib2.UnknownHandler()) +- opener.add_handler(urllib2.HTTPHandler()) +- opener.add_handler(urllib2.HTTPDefaultErrorHandler()) +- opener.add_handler(urllib2.HTTPSHandler()) ++ opener = urllib.request.OpenerDirector() ++ opener.add_handler(urllib.request.ProxyHandler()) ++ opener.add_handler(urllib.request.UnknownHandler()) ++ opener.add_handler(urllib.request.HTTPHandler()) ++ opener.add_handler(urllib.request.HTTPDefaultErrorHandler()) ++ opener.add_handler(urllib.request.HTTPSHandler()) + opener.add_handler(urllib2.HTTPErrorProcessor()) + if self.save_cookies: + self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies") +- self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file) ++ self.cookie_jar = http.cookiejar.MozillaCookieJar(self.cookie_file) + if os.path.exists(self.cookie_file): + try: + self.cookie_jar.load() + self.authenticated = True + StatusUpdate("Loaded authentication cookies from %s" % + self.cookie_file) +- except (cookielib.LoadError, IOError): ++ except (http.cookiejar.LoadError, IOError): + # Failed to load cookies - just ignore them. 
+ pass + else: + # Create an empty cookie file with mode 600 +- fd = os.open(self.cookie_file, os.O_CREAT, 0600) ++ fd = os.open(self.cookie_file, os.O_CREAT, 0o600) + os.close(fd) + # Always chmod the cookie file +- os.chmod(self.cookie_file, 0600) ++ os.chmod(self.cookie_file, 0o600) + else: + # Don't save cookies across runs of update.py. +- self.cookie_jar = cookielib.CookieJar() +- opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar)) ++ self.cookie_jar = http.cookiejar.CookieJar() ++ opener.add_handler(urllib.request.HTTPCookieProcessor(self.cookie_jar)) + return opener + + +@@ -560,7 +560,7 @@ def RunShellWithReturnCode(command, print_output=False, + line = p.stdout.readline() + if not line: + break +- print line.strip("\n") ++ print(line.strip("\n")) + output_array.append(line) + output = "".join(output_array) + else: +@@ -568,7 +568,7 @@ def RunShellWithReturnCode(command, print_output=False, + p.wait() + errout = p.stderr.read() + if print_output and errout: +- print >>sys.stderr, errout ++ print(errout, file=sys.stderr) + p.stdout.close() + p.stderr.close() + return output, p.returncode +@@ -614,11 +614,11 @@ class VersionControlSystem(object): + """Show an "are you sure?" prompt if there are unknown files.""" + unknown_files = self.GetUnknownFiles() + if unknown_files: +- print "The following files are not added to version control:" ++ print("The following files are not added to version control:") + for line in unknown_files: +- print line ++ print(line) + prompt = "Are you sure to continue?(y/N) " +- answer = raw_input(prompt).strip() ++ answer = input(prompt).strip() + if answer != "y": + ErrorExit("User aborted") + +@@ -670,13 +670,13 @@ class VersionControlSystem(object): + else: + type = "current" + if len(content) > MAX_UPLOAD_SIZE: +- print ("Not uploading the %s file for %s because it's too large." % ++ print("Not uploading the %s file for %s because it's too large." 
% + (type, filename)) + file_too_large = True + content = "" + checksum = md5.new(content).hexdigest() + if options.verbose > 0 and not file_too_large: +- print "Uploading %s file for %s" % (type, filename) ++ print("Uploading %s file for %s" % (type, filename)) + url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id) + form_fields = [("filename", filename), + ("status", status), +@@ -698,7 +698,7 @@ class VersionControlSystem(object): + + patches = dict() + [patches.setdefault(v, k) for k, v in patch_list] +- for filename in patches.keys(): ++ for filename in list(patches.keys()): + base_content, new_content, is_binary, status = files[filename] + file_id_str = patches.get(filename) + if file_id_str.find("nobase") != -1: +@@ -755,8 +755,8 @@ class SubversionVCS(VersionControlSystem): + words = line.split() + if len(words) == 2 and words[0] == "URL:": + url = words[1] +- scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) +- username, netloc = urllib.splituser(netloc) ++ scheme, netloc, path, params, query, fragment = urllib.parse.urlparse(url) ++ username, netloc = urllib.parse.splituser(netloc) + if username: + logging.info("Removed username from base URL") + if netloc.endswith("svn.python.org"): +@@ -774,12 +774,12 @@ class SubversionVCS(VersionControlSystem): + logging.info("Guessed CollabNet base = %s", base) + elif netloc.endswith(".googlecode.com"): + path = path + "/" +- base = urlparse.urlunparse(("http", netloc, path, params, ++ base = urllib.parse.urlunparse(("http", netloc, path, params, + query, fragment)) + logging.info("Guessed Google Code base = %s", base) + else: + path = path + "/" +- base = urlparse.urlunparse((scheme, netloc, path, params, ++ base = urllib.parse.urlunparse((scheme, netloc, path, params, + query, fragment)) + logging.info("Guessed base = %s", base) + return base +@@ -1187,7 +1187,7 @@ def UploadSeparatePatches(issue, rpc_server, patchset, data, options): + rv = [] + for patch in patches: + if 
len(patch[1]) > MAX_UPLOAD_SIZE: +- print ("Not uploading the patch for " + patch[0] + ++ print("Not uploading the patch for " + patch[0] + + " because the file is too large.") + continue + form_fields = [("filename", patch[0])] +@@ -1196,7 +1196,7 @@ def UploadSeparatePatches(issue, rpc_server, patchset, data, options): + files = [("data", "data.diff", patch[1])] + ctype, body = EncodeMultipartFormData(form_fields, files) + url = "/%d/upload_patch/%d" % (int(issue), int(patchset)) +- print "Uploading patch for " + patch[0] ++ print("Uploading patch for " + patch[0]) + response_body = rpc_server.Send(url, body, content_type=ctype) + lines = response_body.splitlines() + if not lines or lines[0] != "OK": +@@ -1223,7 +1223,8 @@ def GuessVCS(options): + out, returncode = RunShellWithReturnCode(["hg", "root"]) + if returncode == 0: + return MercurialVCS(options, out.strip()) +- except OSError, (errno, message): ++ except OSError as xxx_todo_changeme: ++ (errno, message) = xxx_todo_changeme.args + if errno != 2: # ENOENT -- they don't have hg installed. + raise + +@@ -1239,7 +1240,8 @@ def GuessVCS(options): + "--is-inside-work-tree"]) + if returncode == 0: + return GitVCS(options) +- except OSError, (errno, message): ++ except OSError as xxx_todo_changeme1: ++ (errno, message) = xxx_todo_changeme1.args + if errno != 2: # ENOENT -- they don't have git installed. 
+ raise + +@@ -1286,12 +1288,12 @@ def RealMain(argv, data=None): + data = vcs.GenerateDiff(args) + files = vcs.GetBaseFiles(data) + if verbosity >= 1: +- print "Upload server:", options.server, "(change with -s/--server)" ++ print("Upload server:", options.server, "(change with -s/--server)") + if options.issue: + prompt = "Message describing this patch set: " + else: + prompt = "New issue subject: " +- message = options.message or raw_input(prompt).strip() ++ message = options.message or input(prompt).strip() + if not message: + ErrorExit("A non-empty message is required") + rpc_server = GetRpcServer(options) +@@ -1324,7 +1326,7 @@ def RealMain(argv, data=None): + # Send a hash of all the base file so the server can determine if a copy + # already exists in an earlier patchset. + base_hashes = "" +- for file, info in files.iteritems(): ++ for file, info in files.items(): + if not info[0] is None: + checksum = md5.new(info[0]).hexdigest() + if base_hashes: +@@ -1338,7 +1340,7 @@ def RealMain(argv, data=None): + if not options.download_base: + form_fields.append(("content_upload", "1")) + if len(data) > MAX_UPLOAD_SIZE: +- print "Patch is large, so uploading file patches separately." 
++ print("Patch is large, so uploading file patches separately.") + uploaded_diff_file = [] + form_fields.append(("separate_patches", "1")) + else: +@@ -1378,7 +1380,7 @@ def main(): + try: + RealMain(sys.argv) + except KeyboardInterrupt: +- print ++ print() + StatusUpdate("Interrupted.") + sys.exit(1) + +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/xcode/Scripts/versiongenerate.py b/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/xcode/Scripts/versiongenerate.py +index bdd7541ad..a4b273a02 100755 +--- a/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/xcode/Scripts/versiongenerate.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/third_party/googletest/xcode/Scripts/versiongenerate.py +@@ -54,7 +54,7 @@ import re + + # Read the command line argument (the output directory for Version.h) + if (len(sys.argv) < 3): +- print "Usage: versiongenerate.py input_dir output_dir" ++ print("Usage: versiongenerate.py input_dir output_dir") + sys.exit(1) + else: + input_dir = sys.argv[1] +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/util/bot/extract.py b/src/3rdparty/chromium/third_party/boringssl/src/util/bot/extract.py +index 4680cfe3c..cd02a0607 100644 +--- a/src/3rdparty/chromium/third_party/boringssl/src/util/bot/extract.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/util/bot/extract.py +@@ -109,7 +109,7 @@ def main(args): + if os.path.exists(stamp_path): + with open(stamp_path) as f: + if f.read().strip() == digest: +- print "Already up-to-date." 
++ print("Already up-to-date.") + return 0 + + if archive.endswith('.zip'): +@@ -123,10 +123,10 @@ def main(args): + + try: + if os.path.exists(output): +- print "Removing %s" % (output, ) ++ print("Removing %s" % (output, )) + shutil.rmtree(output) + +- print "Extracting %s to %s" % (archive, output) ++ print("Extracting %s to %s" % (archive, output)) + prefix = None + num_extracted = 0 + for entry in entries: +@@ -166,14 +166,14 @@ def main(args): + # Print every 100 files, so bots do not time out on large archives. + num_extracted += 1 + if num_extracted % 100 == 0: +- print "Extracted %d files..." % (num_extracted,) ++ print("Extracted %d files..." % (num_extracted,)) + finally: + entries.close() + + with open(stamp_path, 'w') as f: + f.write(digest) + +- print "Done. Extracted %d files." % (num_extracted,) ++ print("Done. Extracted %d files." % (num_extracted,)) + return 0 + + +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/util/bot/go/bootstrap.py b/src/3rdparty/chromium/third_party/boringssl/src/util/bot/go/bootstrap.py +index 06ab4c835..364a2e886 100755 +--- a/src/3rdparty/chromium/third_party/boringssl/src/util/bot/go/bootstrap.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/util/bot/go/bootstrap.py +@@ -23,7 +23,7 @@ import subprocess + import sys + import tarfile + import tempfile +-import urllib ++import urllib.request, urllib.parse, urllib.error + import zipfile + + # TODO(vadimsh): Migrate to new golang.org/x/ paths once Golang moves to +@@ -147,10 +147,10 @@ def download_file(url, path): + def report(a, b, c): + progress = int(a * b * 100.0 / c) + if progress != last_progress[0]: +- print >> sys.stderr, 'Downloading... %d%%' % progress ++ print('Downloading... %d%%' % progress, file=sys.stderr) + last_progress[0] = progress + # TODO(vadimsh): Use something less crippled, something that validates SSL. 
+- urllib.urlretrieve(url, path, reporthook=report) ++ urllib.request.urlretrieve(url, path, reporthook=report) + + + @contextlib.contextmanager +@@ -286,7 +286,7 @@ def find_executable(name, workspaces): + + def main(args): + if args: +- print >> sys.stderr, sys.modules[__name__].__doc__, ++ print(sys.modules[__name__].__doc__, end=' ', file=sys.stderr) + return 2 + bootstrap(logging.DEBUG) + return 0 +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/util/bot/go/env.py b/src/3rdparty/chromium/third_party/boringssl/src/util/bot/go/env.py +index 820968c9b..dc0731c09 100755 +--- a/src/3rdparty/chromium/third_party/boringssl/src/util/bot/go/env.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/util/bot/go/env.py +@@ -34,9 +34,9 @@ old = os.environ.copy() + new = bootstrap.prepare_go_environ() + + if len(sys.argv) == 1: +- for key, value in sorted(new.iteritems()): ++ for key, value in sorted(new.items()): + if old.get(key) != value: +- print 'export %s="%s"' % (key, value) ++ print('export %s="%s"' % (key, value)) + else: + exe = sys.argv[1] + if exe == 'python': +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/util/bot/update_clang.py b/src/3rdparty/chromium/third_party/boringssl/src/util/bot/update_clang.py +index ceeb265ca..66e48670d 100644 +--- a/src/3rdparty/chromium/third_party/boringssl/src/util/bot/update_clang.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/util/bot/update_clang.py +@@ -13,7 +13,7 @@ import sys + import tarfile + import tempfile + import time +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + + + # CLANG_REVISION and CLANG_SUB_REVISION determine the build of clang +@@ -54,7 +54,7 @@ def DownloadUrl(url, output_file): + try: + sys.stdout.write('Downloading %s ' % url) + sys.stdout.flush() +- response = urllib2.urlopen(url) ++ response = urllib.request.urlopen(url) + total_size = int(response.info().getheader('Content-Length').strip()) + bytes_done = 0 + dots_printed = 0 +@@ -69,24 
+69,24 @@ def DownloadUrl(url, output_file): + sys.stdout.flush() + dots_printed = num_dots + if bytes_done != total_size: +- raise urllib2.URLError("only got %d of %d bytes" % ++ raise urllib.error.URLError("only got %d of %d bytes" % + (bytes_done, total_size)) +- print ' Done.' ++ print(' Done.') + return +- except urllib2.URLError as e: ++ except urllib.error.URLError as e: + sys.stdout.write('\n') +- print e +- if num_retries == 0 or isinstance(e, urllib2.HTTPError) and e.code == 404: ++ print(e) ++ if num_retries == 0 or isinstance(e, urllib.error.HTTPError) and e.code == 404: + raise e + num_retries -= 1 +- print 'Retrying in %d s ...' % retry_wait_s ++ print('Retrying in %d s ...' % retry_wait_s) + time.sleep(retry_wait_s) + retry_wait_s *= 2 + + + def EnsureDirExists(path): + if not os.path.exists(path): +- print "Creating directory %s" % path ++ print("Creating directory %s" % path) + os.makedirs(path) + + +@@ -129,7 +129,7 @@ def RmTree(dir): + + def CopyFile(src, dst): + """Copy a file from src to dst.""" +- print "Copying %s to %s" % (src, dst) ++ print("Copying %s to %s" % (src, dst)) + shutil.copy(src, dst) + + +@@ -170,28 +170,28 @@ def UpdateClang(): + else: + return 0 + +- print 'Updating Clang to %s...' % PACKAGE_VERSION ++ print('Updating Clang to %s...' % PACKAGE_VERSION) + + if ReadStampFile() == PACKAGE_VERSION: +- print 'Clang is already up to date.' ++ print('Clang is already up to date.') + return 0 + + # Reset the stamp file in case the build is unsuccessful. 
+ WriteStampFile('') + +- print 'Downloading prebuilt clang' ++ print('Downloading prebuilt clang') + if os.path.exists(LLVM_BUILD_DIR): + RmTree(LLVM_BUILD_DIR) + try: + DownloadAndUnpack(cds_full_url, LLVM_BUILD_DIR) +- print 'clang %s unpacked' % PACKAGE_VERSION ++ print('clang %s unpacked' % PACKAGE_VERSION) + if sys.platform == 'win32': + CopyDiaDllTo(os.path.join(LLVM_BUILD_DIR, 'bin')) + WriteStampFile(PACKAGE_VERSION) + return 0 +- except urllib2.URLError: +- print 'Failed to download prebuilt clang %s' % cds_file +- print 'Exiting.' ++ except urllib.error.URLError: ++ print('Failed to download prebuilt clang %s' % cds_file) ++ print('Exiting.') + return 1 + + +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/util/bot/vs_env.py b/src/3rdparty/chromium/third_party/boringssl/src/util/bot/vs_env.py +index 184750063..f85ec974f 100644 +--- a/src/3rdparty/chromium/third_party/boringssl/src/util/bot/vs_env.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/util/bot/vs_env.py +@@ -20,7 +20,7 @@ import vs_toolchain + import gyp.MSVSVersion + + if len(sys.argv) < 2: +- print >>sys.stderr, "Usage: vs_env.py TARGET_ARCH CMD..." 
++ print("Usage: vs_env.py TARGET_ARCH CMD...", file=sys.stderr) + sys.exit(1) + + target_arch = sys.argv[1] +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/util/bot/vs_toolchain.py b/src/3rdparty/chromium/third_party/boringssl/src/util/bot/vs_toolchain.py +index eb3e04973..2df5ce833 100644 +--- a/src/3rdparty/chromium/third_party/boringssl/src/util/bot/vs_toolchain.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/util/bot/vs_toolchain.py +@@ -56,7 +56,7 @@ def SetEnvironmentAndGetRuntimeDllDirs(): + gyp_defines_dict = gyp.NameValueListToDict(gyp.ShlexEnv('GYP_DEFINES')) + gyp_defines_dict['windows_sdk_path'] = win_sdk + os.environ['GYP_DEFINES'] = ' '.join('%s=%s' % (k, pipes.quote(str(v))) +- for k, v in gyp_defines_dict.iteritems()) ++ for k, v in gyp_defines_dict.items()) + os.environ['WINDOWSSDKDIR'] = win_sdk + os.environ['WDK_DIR'] = wdk + # Include the VS runtime in the PATH in case it's not machine-installed. +@@ -125,7 +125,7 @@ def main(): + 'update': Update, + } + if len(sys.argv) < 2 or sys.argv[1] not in commands: +- print >>sys.stderr, 'Expected one of: %s' % ', '.join(commands) ++ print('Expected one of: %s' % ', '.join(commands), file=sys.stderr) + return 1 + return commands[sys.argv[1]](*sys.argv[2:]) + +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/util/generate-asm-lcov.py b/src/3rdparty/chromium/third_party/boringssl/src/util/generate-asm-lcov.py +index 257ae841c..adbf5ebdb 100755 +--- a/src/3rdparty/chromium/third_party/boringssl/src/util/generate-asm-lcov.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/util/generate-asm-lcov.py +@@ -118,7 +118,7 @@ def output(data): + """Takes a dictionary |data| of filenames and execution counts and generates + a LCOV coverage output.""" + out = '' +- for filename, counts in data.iteritems(): ++ for filename, counts in data.items(): + out += 'SF:%s\n' % (os.path.abspath(filename)) + for line, count in enumerate(counts): + if count != None: +@@ -128,7 +128,7 @@ 
def output(data): + + if __name__ == '__main__': + if len(sys.argv) != 3: +- print '%s ' % (__file__) ++ print('%s ' % (__file__)) + sys.exit() + + cg_folder = sys.argv[1] +@@ -149,4 +149,4 @@ if __name__ == '__main__': + + annotated = merge(cg_files, srcs) + lcov = generate(annotated) +- print output(lcov) ++ print(output(lcov)) +diff --git a/src/3rdparty/chromium/third_party/boringssl/src/util/generate_build_files.py b/src/3rdparty/chromium/third_party/boringssl/src/util/generate_build_files.py +index ba87f654e..9c99609c4 100644 +--- a/src/3rdparty/chromium/third_party/boringssl/src/util/generate_build_files.py ++++ b/src/3rdparty/chromium/third_party/boringssl/src/util/generate_build_files.py +@@ -155,7 +155,7 @@ class Android(object): + Returns: + A copy of |asm| with files filtered according to |want_bcm| + """ +- return [(archinfo, filter(lambda p: ("/crypto/fipsmodule/" in p) == want_bcm, files)) ++ return [(archinfo, [p for p in files if ("/crypto/fipsmodule/" in p) == want_bcm]) + for (archinfo, files) in asm] + + +@@ -816,10 +816,10 @@ def WriteAsmFiles(perlasms): + perlasm['extra_args'] + extra_args) + asmfiles.setdefault(key, []).append(output) + +- for (key, non_perl_asm_files) in NON_PERL_FILES.iteritems(): ++ for (key, non_perl_asm_files) in NON_PERL_FILES.items(): + asmfiles.setdefault(key, []).extend(non_perl_asm_files) + +- for files in asmfiles.itervalues(): ++ for files in asmfiles.values(): + files.sort() + + return asmfiles +@@ -952,7 +952,7 @@ def main(platforms): + 'urandom_test': urandom_test_files, + } + +- asm_outputs = sorted(WriteAsmFiles(ReadPerlAsmOperations()).iteritems()) ++ asm_outputs = sorted(WriteAsmFiles(ReadPerlAsmOperations()).items()) + + for platform in platforms: + platform.WriteFiles(files, asm_outputs) +diff --git a/src/3rdparty/chromium/third_party/breakpad/breakpad/src/tools/python/deps-to-manifest.py b/src/3rdparty/chromium/third_party/breakpad/breakpad/src/tools/python/deps-to-manifest.py +index b45628543..08ee44fdf 
100755 +--- a/src/3rdparty/chromium/third_party/breakpad/breakpad/src/tools/python/deps-to-manifest.py ++++ b/src/3rdparty/chromium/third_party/breakpad/breakpad/src/tools/python/deps-to-manifest.py +@@ -30,7 +30,7 @@ + + """Convert gclient's DEPS file to repo's manifest xml file.""" + +-from __future__ import print_function ++ + + import argparse + import os +@@ -77,7 +77,7 @@ def ConvertDepsToManifest(deps, manifest): + """Convert the |deps| file to the |manifest|.""" + # Load the DEPS file data. + ctx = {} +- execfile(deps, ctx) ++ exec(compile(open(deps, "rb").read(), deps, 'exec'), ctx) + + new_contents = '' + +@@ -88,7 +88,7 @@ def ConvertDepsToManifest(deps, manifest): + new_contents += MANIFEST_HEAD % data + + # Write out the sections. +- for name, fetch in REMOTES.items(): ++ for name, fetch in list(REMOTES.items()): + data = { + 'name': name, + 'fetch': fetch, +@@ -106,8 +106,8 @@ def ConvertDepsToManifest(deps, manifest): + new_contents += MANIFEST_PROJECT % data + + # Write out the sections. +- for path, url in ctx['deps'].items(): +- for name, fetch in REMOTES.items(): ++ for path, url in list(ctx['deps'].items()): ++ for name, fetch in list(REMOTES.items()): + if url.startswith(fetch): + remote = name + break +diff --git a/src/3rdparty/chromium/third_party/breakpad/breakpad/src/tools/python/filter_syms.py b/src/3rdparty/chromium/third_party/breakpad/breakpad/src/tools/python/filter_syms.py +index abddf7893..965fe2cb8 100644 +--- a/src/3rdparty/chromium/third_party/breakpad/breakpad/src/tools/python/filter_syms.py ++++ b/src/3rdparty/chromium/third_party/breakpad/breakpad/src/tools/python/filter_syms.py +@@ -133,8 +133,8 @@ class SymbolFileParser(object): + Returns: + The actual path to use when writing the FILE record. 
+ """ +- return path[len(filter(path.startswith, +- self.ignored_prefixes + [''])[0]):] ++ return path[len(list(filter(path.startswith, ++ self.ignored_prefixes + ['']))[0]):] + + def _ParseFileRecord(self, file_record): + """Parses and corrects a FILE record.""" +@@ -194,9 +194,9 @@ def main(): + symbol_parser = SymbolFileParser(sys.stdin, sys.stdout, options.prefixes, + path_handler) + symbol_parser.Process() +- except BreakpadParseError, e: +- print >> sys.stderr, 'Got an error while processing symbol file' +- print >> sys.stderr, str(e) ++ except BreakpadParseError as e: ++ print('Got an error while processing symbol file', file=sys.stderr) ++ print(str(e), file=sys.stderr) + return 1 + return 0 + +diff --git a/src/3rdparty/chromium/third_party/catapult/catapult_build/appengine_deploy.py b/src/3rdparty/chromium/third_party/catapult/catapult_build/appengine_deploy.py +index 8c2732eac..776582343 100644 +--- a/src/3rdparty/chromium/third_party/catapult/catapult_build/appengine_deploy.py ++++ b/src/3rdparty/chromium/third_party/catapult/catapult_build/appengine_deploy.py +@@ -21,7 +21,7 @@ def Deploy(paths, args, version=None): + version = _VersionName() + with temp_deployment_dir.TempDeploymentDir( + paths, use_symlinks=False) as temp_dir: +- print 'Deploying from "%s".' % temp_dir ++ print('Deploying from "%s".' % temp_dir) + + # google-cloud-sdk/bin/gcloud is a shell script, which we can't subprocess + # on Windows with shell=False. So, execute the Python script directly. +@@ -30,9 +30,9 @@ def Deploy(paths, args, version=None): + else: + script_path = _FindScriptInPath('gcloud') + if not script_path: +- print 'This script requires the Google Cloud SDK to be in PATH.' 
+- print 'Install at https://cloud.google.com/sdk and then run' +- print '`gcloud components install app-engine-python`' ++ print('This script requires the Google Cloud SDK to be in PATH.') ++ print('Install at https://cloud.google.com/sdk and then run') ++ print('`gcloud components install app-engine-python`') + sys.exit(1) + + subprocess.check_call([script_path, 'app', 'deploy', '--no-promote', +diff --git a/src/3rdparty/chromium/third_party/catapult/catapult_build/appengine_dev_server.py b/src/3rdparty/chromium/third_party/catapult/catapult_build/appengine_dev_server.py +index ad8582f0f..609b60f09 100644 +--- a/src/3rdparty/chromium/third_party/catapult/catapult_build/appengine_dev_server.py ++++ b/src/3rdparty/chromium/third_party/catapult/catapult_build/appengine_dev_server.py +@@ -24,11 +24,11 @@ def DevAppserver(paths, args, reuse_path=None): + """ + with temp_deployment_dir.TempDeploymentDir( + paths, reuse_path=reuse_path) as temp_dir: +- print 'Running dev server on "%s".' % temp_dir ++ print('Running dev server on "%s".' % temp_dir) + + script_path = _FindScriptInPath('dev_appserver.py') + if not script_path: +- print 'This script requires the App Engine SDK to be in PATH.' ++ print('This script requires the App Engine SDK to be in PATH.') + sys.exit(1) + + subprocess.call([sys.executable, script_path] + +diff --git a/src/3rdparty/chromium/third_party/catapult/catapult_build/dev_server.py b/src/3rdparty/chromium/third_party/catapult/catapult_build/dev_server.py +index d109f2c8f..0a3c3e6e5 100644 +--- a/src/3rdparty/chromium/third_party/catapult/catapult_build/dev_server.py ++++ b/src/3rdparty/chromium/third_party/catapult/catapult_build/dev_server.py +@@ -2,13 +2,13 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import print_function ++ + + import argparse + import json + import os + import sys +-import urlparse ++import urllib.parse + + from hooks import install + +@@ -169,7 +169,7 @@ class SimpleDirectoryHandler(webapp2.RequestHandler): + class TestOverviewHandler(webapp2.RequestHandler): + def get(self, *args, **kwargs): # pylint: disable=unused-argument + test_links = [] +- for name, path in kwargs.pop('pds').iteritems(): ++ for name, path in kwargs.pop('pds').items(): + test_links.append(_LINK_ITEM % (path, name)) + quick_links = [] + for name, path in _QUICK_LINKS: +@@ -259,14 +259,14 @@ class DevServerApp(webapp2.WSGIApplication): + continue + rel = os.path.relpath(filename, source_path) + unix_rel = _RelPathToUnixPath(rel) +- url = urlparse.urljoin(mapped_path, unix_rel) ++ url = urllib.parse.urljoin(mapped_path, unix_rel) + return url + + path = SourcePathsHandler.GetServingPathForAbsFilename( + self._all_source_paths, filename) + if path is None: + return None +- return urlparse.urljoin('/', path) ++ return urllib.parse.urljoin('/', path) + + + def _AddPleaseExitMixinToServer(server): +diff --git a/src/3rdparty/chromium/third_party/catapult/catapult_build/html_checks.py b/src/3rdparty/chromium/third_party/catapult/catapult_build/html_checks.py +index bd3d9f056..1e96ab52b 100644 +--- a/src/3rdparty/chromium/third_party/catapult/catapult_build/html_checks.py ++++ b/src/3rdparty/chromium/third_party/catapult/catapult_build/html_checks.py +@@ -60,7 +60,7 @@ def CheckImportOrder(path, soup, results, output_api): + + grouped_hrefs[','.join(link.get('rel'))].append(link.get('href')) + +- for rel, actual_hrefs in grouped_hrefs.iteritems(): ++ for rel, actual_hrefs in grouped_hrefs.items(): + expected_hrefs = list(sorted(set(actual_hrefs))) + if actual_hrefs != expected_hrefs: + error_text = ( +diff --git a/src/3rdparty/chromium/third_party/catapult/catapult_build/perfbot_stats/chrome_perf_stats.py 
b/src/3rdparty/chromium/third_party/catapult/catapult_build/perfbot_stats/chrome_perf_stats.py +index e98fddfd3..59e31834f 100755 +--- a/src/3rdparty/chromium/third_party/catapult/catapult_build/perfbot_stats/chrome_perf_stats.py ++++ b/src/3rdparty/chromium/third_party/catapult/catapult_build/perfbot_stats/chrome_perf_stats.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python2.7 ++#!/usr/bin/env python3 + # Copyright 2015 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. +@@ -18,8 +18,8 @@ import calendar + import datetime + import json + import sys +-import urllib +-import urllib2 ++import urllib.request, urllib.parse, urllib.error ++import urllib.request, urllib.error, urllib.parse + + BUILDER_LIST_URL = ('https://chrome-infra-stats.appspot.com/' + '_ah/api/stats/v1/masters/chromium.perf') +@@ -33,7 +33,7 @@ USAGE = ('Usage: chrome_perf_stats.py . If date is not ' + + def main(): + if len(sys.argv) == 2 and sys.argv[0] == '--help': +- print USAGE ++ print(USAGE) + sys.exit(0) + year = None + month = None +@@ -41,22 +41,22 @@ def main(): + if len(sys.argv) == 4 or len(sys.argv) == 3: + year = int(sys.argv[1]) + if year > 2016 or year < 2014: +- print USAGE ++ print(USAGE) + sys.exit(0) + month = int(sys.argv[2]) + if month > 12 or month <= 0: +- print USAGE ++ print(USAGE) + sys.exit(0) + if len(sys.argv) == 3: +- days = range(1, calendar.monthrange(year, month)[1] + 1) ++ days = list(range(1, calendar.monthrange(year, month)[1] + 1)) + else: + day = int(sys.argv[3]) + if day > 31 or day <= 0: +- print USAGE ++ print(USAGE) + sys.exit(0) + days = [day] + elif len(sys.argv) != 1: +- print USAGE ++ print(USAGE) + sys.exit(0) + else: + yesterday = datetime.date.today() - datetime.timedelta(days=1) +@@ -64,7 +64,7 @@ def main(): + month = yesterday.month + days = [yesterday.day] + +- response = urllib2.urlopen(BUILDER_LIST_URL) ++ response = 
urllib.request.urlopen(BUILDER_LIST_URL) + builders = [builder['name'] for builder in json.load(response)['builders']] + success_rates = CalculateSuccessRates(year, month, days, builders) + UploadToPerfDashboard(success_rates) +@@ -87,10 +87,10 @@ def _UpdateSuccessRatesWithResult( + + def _SummarizeSuccessRates(success_rates): + overall_success_rates = [] +- for day, results in success_rates.iteritems(): ++ for day, results in success_rates.items(): + success_rate_sum = 0 + success_rate_count = 0 +- for rates in results.values(): ++ for rates in list(results.values()): + if rates['count'] == 0: + continue + success_rate_sum += ( +@@ -131,8 +131,8 @@ def UploadToPerfDashboard(success_rates): + } + } + url = 'https://chromeperf.appspot.com/add_point' +- data = urllib.urlencode({'data': json.dumps(dashboard_data)}) +- urllib2.urlopen(url=url, data=data).read() ++ data = urllib.parse.urlencode({'data': json.dumps(dashboard_data)}) ++ urllib.request.urlopen(url=url, data=data).read() + + + def CalculateSuccessRates(year, month, days, builders): +@@ -143,8 +143,8 @@ def CalculateSuccessRates(year, month, days, builders): + date_dict_str = '%d%02d%02d' % (year, month, day) + for builder in builders: + url = BUILDER_STATS_URL % ( +- urllib.quote(builder), urllib.quote(date_str)) +- response = urllib2.urlopen(url) ++ urllib.parse.quote(builder), urllib.parse.quote(date_str)) ++ response = urllib.request.urlopen(url) + results = json.load(response) + _UpdateSuccessRatesWithResult( + success_rates, results, date_dict_str, builder) +diff --git a/src/3rdparty/chromium/third_party/catapult/catapult_build/perfbot_stats/chrome_perf_stats_unittest.py b/src/3rdparty/chromium/third_party/catapult/catapult_build/perfbot_stats/chrome_perf_stats_unittest.py +index bb72bf190..80a405657 100644 +--- a/src/3rdparty/chromium/third_party/catapult/catapult_build/perfbot_stats/chrome_perf_stats_unittest.py ++++ 
b/src/3rdparty/chromium/third_party/catapult/catapult_build/perfbot_stats/chrome_perf_stats_unittest.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python2.7 ++#!/usr/bin/env python3 + # Copyright 2015 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. +diff --git a/src/3rdparty/chromium/third_party/catapult/catapult_build/perfbot_stats/chrome_perf_step_timings.py b/src/3rdparty/chromium/third_party/catapult/catapult_build/perfbot_stats/chrome_perf_step_timings.py +index 30558abd1..1384862c8 100755 +--- a/src/3rdparty/chromium/third_party/catapult/catapult_build/perfbot_stats/chrome_perf_step_timings.py ++++ b/src/3rdparty/chromium/third_party/catapult/catapult_build/perfbot_stats/chrome_perf_step_timings.py +@@ -1,4 +1,4 @@ +-#!/usr/bin/env python2.7 ++#!/usr/bin/env python3 + # Copyright 2015 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. +@@ -17,8 +17,8 @@ import csv + import datetime + import json + import sys +-import urllib +-import urllib2 ++import urllib.request, urllib.parse, urllib.error ++import urllib.request, urllib.error, urllib.parse + + + BUILDER_STEPS_URL = ('https://chrome-infra-stats.appspot.com/_ah/api/stats/v1/' +@@ -115,7 +115,7 @@ USAGE = 'Usage: chrome-perf-step-timings.py ' + + def main(): + if len(sys.argv) != 2: +- print USAGE ++ print(USAGE) + sys.exit(0) + outfilename = sys.argv[1] + +@@ -129,18 +129,18 @@ def main(): + + for builder in KNOWN_TESTERS_LIST: + step_timings = [] +- url = BUILDER_STEPS_URL % urllib.quote(builder) +- response = urllib2.urlopen(url) ++ url = BUILDER_STEPS_URL % urllib.parse.quote(builder) ++ response = urllib.request.urlopen(url) + results = json.load(response) + steps = results['steps'] + steps.sort() # to group tests and their references together. 
+ for step in steps: + if step in IGNORED_STEPS: + continue +- url = STEP_ACTIVE_URL % (urllib.quote(builder), urllib.quote(step)) +- response = urllib2.urlopen(url) ++ url = STEP_ACTIVE_URL % (urllib.parse.quote(builder), urllib.parse.quote(step)) ++ response = urllib.request.urlopen(url) + results = json.load(response) +- if ('step_records' not in results.keys() or ++ if ('step_records' not in list(results.keys()) or + len(results['step_records']) == 0): + continue + first_record = results['step_records'][0] +@@ -149,8 +149,8 @@ def main(): + # ignore steps that did not run for more than 2 days + if last_step_time < threshold_time: + continue +- url = STEP_STATS_URL % (urllib.quote(builder), urllib.quote(step)) +- response = urllib2.urlopen(url) ++ url = STEP_STATS_URL % (urllib.parse.quote(builder), urllib.parse.quote(step)) ++ response = urllib.request.urlopen(url) + results = json.load(response) + step_timings.append( + [builder, step, results['count'], results['stddev'], +diff --git a/src/3rdparty/chromium/third_party/catapult/catapult_build/run_dev_server_tests.py b/src/3rdparty/chromium/third_party/catapult/catapult_build/run_dev_server_tests.py +index 44f51a309..434e724e6 100644 +--- a/src/3rdparty/chromium/third_party/catapult/catapult_build/run_dev_server_tests.py ++++ b/src/3rdparty/chromium/third_party/catapult/catapult_build/run_dev_server_tests.py +@@ -3,7 +3,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import print_function ++ + + import argparse + import json +diff --git a/src/3rdparty/chromium/third_party/catapult/catapult_build/test_runner.py b/src/3rdparty/chromium/third_party/catapult/catapult_build/test_runner.py +index d3286a8fd..b30bc3cde 100644 +--- a/src/3rdparty/chromium/third_party/catapult/catapult_build/test_runner.py ++++ b/src/3rdparty/chromium/third_party/catapult/catapult_build/test_runner.py +@@ -8,8 +8,8 @@ import os + import subprocess + import sys + +-FAIL_EMOJI = u'\U0001F631'.encode('utf-8') +-PASS_EMOJI = u'\U0001F601'.encode('utf-8') ++FAIL_EMOJI = '\U0001F631'.encode('utf-8') ++PASS_EMOJI = '\U0001F601'.encode('utf-8') + + GREEN = '\033[92m' + RED = '\033[91m' +@@ -53,9 +53,9 @@ def Main(name, tests, argv): + os.path.basename(test['path']), test['path']) + + if exit_code: +- print _Color('Oops! Some tests failed.', RED), FAIL_EMOJI ++ print(_Color('Oops! Some tests failed.', RED), FAIL_EMOJI) + sys.stderr.writelines(errors) + else: +- print _Color('Woho! All tests passed.', GREEN), PASS_EMOJI ++ print(_Color('Woho! 
All tests passed.', GREEN), PASS_EMOJI) + + sys.exit(exit_code) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/bin/update_chrome_reference_binaries.py b/src/3rdparty/chromium/third_party/catapult/common/bin/update_chrome_reference_binaries.py +index 86a1d7fe9..5a9c4195e 100755 +--- a/src/3rdparty/chromium/third_party/catapult/common/bin/update_chrome_reference_binaries.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/bin/update_chrome_reference_binaries.py +@@ -20,7 +20,7 @@ import shutil + import subprocess + import sys + import tempfile +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + import zipfile + + sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'py_utils')) +@@ -170,7 +170,7 @@ def _ChannelVersionsMap(channel): + + def _OmahaReportVersionInfo(channel): + url ='https://omahaproxy.appspot.com/all?channel=%s' % channel +- lines = urllib2.urlopen(url).readlines() ++ lines = urllib.request.urlopen(url).readlines() + return [l.split(',') for l in lines] + + +@@ -263,7 +263,7 @@ def _ResolveChromiumRemotePath(channel, platform, version_info): + closest_snapshot = _FindClosestChromiumSnapshot( + branch_base_position, build_dir) + if closest_snapshot != branch_base_position: +- print ('Channel %s corresponds to commit position ' % channel + ++ print('Channel %s corresponds to commit position ' % channel + + '%d on %s, ' % (branch_base_position, platform) + + 'but closest chromium snapshot available on ' + + '%s is %d' % (_CHROMIUM_GS_BUCKET, closest_snapshot)) +@@ -348,11 +348,11 @@ def _RemoveKeystoneFromBuild(location): + + def _NeedsUpdate(config, binary, channel, platform, version_info): + channel_version = version_info.version +- print 'Checking %s (%s channel) on %s' % (binary, channel, platform) ++ print('Checking %s (%s channel) on %s' % (binary, channel, platform)) + current_version = config.GetVersion('%s_%s' % (binary, channel), platform) +- print 'current: %s, channel: %s' % 
(current_version, channel_version) ++ print('current: %s, channel: %s' % (current_version, channel_version)) + if current_version and current_version == channel_version: +- print 'Already up to date.' ++ print('Already up to date.') + return False + return True + +@@ -372,7 +372,7 @@ def UpdateBuilds(args): + _QueuePlatformUpdate('chromium', platform, version_info, + config, channel) + +- print 'Updating builds with downloaded binaries' ++ print('Updating builds with downloaded binaries') + config.ExecuteUpdateJobs(force=True) + + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/lab/commits.py b/src/3rdparty/chromium/third_party/catapult/common/lab/commits.py +index 6d47b9166..415a18577 100755 +--- a/src/3rdparty/chromium/third_party/catapult/common/lab/commits.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/lab/commits.py +@@ -9,8 +9,8 @@ import datetime + import itertools + import json + import math +-import urllib +-import urllib2 ++import urllib.request, urllib.parse, urllib.error ++import urllib.request, urllib.error, urllib.parse + + + _BASE_URL = 'https://chromium.googlesource.com' +@@ -29,7 +29,7 @@ def Pairwise(iterable): + """s -> (s0,s1), (s1,s2), (s2, s3), ...""" + a, b = itertools.tee(iterable) + next(b, None) +- return itertools.izip(a, b) ++ return zip(a, b) + + + def Percentile(data, percentile): +@@ -58,9 +58,9 @@ def Percentile(data, percentile): + + + def CommitTimes(repository, revision_count): +- parameters = urllib.urlencode((('n', revision_count), ('format', 'JSON'))) +- url = '%s/%s/+log?%s' % (_BASE_URL, urllib.quote(repository), parameters) +- data = json.loads(''.join(urllib2.urlopen(url).read().splitlines()[1:])) ++ parameters = urllib.parse.urlencode((('n', revision_count), ('format', 'JSON'))) ++ url = '%s/%s/+log?%s' % (_BASE_URL, urllib.parse.quote(repository), parameters) ++ data = json.loads(''.join(urllib.request.urlopen(url).read().splitlines()[1:])) + + commit_times = [] + for revision in data['log']: 
+@@ -87,18 +87,18 @@ def main(): + commit_durations.append((time1 - time2).total_seconds() / 60.) + commit_durations.sort() + +- print 'REPOSITORY:', repository +- print 'Start Date:', min(commit_times), 'PDT' +- print ' End Date:', max(commit_times), 'PDT' +- print ' Duration:', max(commit_times) - min(commit_times) +- print ' n:', len(commit_times) ++ print('REPOSITORY:', repository) ++ print('Start Date:', min(commit_times), 'PDT') ++ print(' End Date:', max(commit_times), 'PDT') ++ print(' Duration:', max(commit_times) - min(commit_times)) ++ print(' n:', len(commit_times)) + + for p in (0.25, 0.50, 0.90): + percentile = Percentile(commit_durations, p) +- print '%3d%% commit duration:' % (p * 100), '%6.1fm' % percentile ++ print('%3d%% commit duration:' % (p * 100), '%6.1fm' % percentile) + mean = math.fsum(commit_durations) / len(commit_durations) +- print 'Mean commit duration:', '%6.1fm' % mean +- print ++ print('Mean commit duration:', '%6.1fm' % mean) ++ print() + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/third_party/catapult/common/lab/hardware.py b/src/3rdparty/chromium/third_party/catapult/common/lab/hardware.py +index 5e49c5c86..d6ff43fab 100755 +--- a/src/3rdparty/chromium/third_party/catapult/common/lab/hardware.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/lab/hardware.py +@@ -9,7 +9,7 @@ import csv + import json + import logging + import sys +-import urllib2 ++import urllib.request, urllib.error, urllib.parse + + + _MASTERS = [ +@@ -50,11 +50,11 @@ def main(): + writer.writeheader() + + for master_name in _MASTERS: +- master_data = json.load(urllib2.urlopen( ++ master_data = json.load(urllib.request.urlopen( + 'http://build.chromium.org/p/%s/json/slaves' % master_name)) + +- slaves = sorted(master_data.iteritems(), +- key=lambda x: (x[1]['builders'].keys(), x[0])) ++ slaves = sorted(iter(master_data.items()), ++ key=lambda x: (list(x[1]['builders'].keys()), x[0])) + for slave_name, slave_data in slaves: + for 
builder_name in slave_data['builders']: + row = { +@@ -76,7 +76,7 @@ def main(): + row[key] = value + + # Munge keys. +- row = {key.replace('_', ' '): value for key, value in row.iteritems()} ++ row = {key.replace('_', ' '): value for key, value in row.items()} + if 'osfamily' in row: + row['os family'] = row.pop('osfamily') + if 'product name' not in row and slave_name.startswith('slave'): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/node_runner/node_runner/node_util.py b/src/3rdparty/chromium/third_party/catapult/common/node_runner/node_runner/node_util.py +index 05d0084bb..8b9918a38 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/node_runner/node_runner/node_util.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/node_runner/node_runner/node_util.py +@@ -56,5 +56,5 @@ def GetNodeModulesPath(): + 'node_modules')) + if sys.platform.startswith('win'): + # Escape path on Windows because it's very long and must be passed to NTFS. +- path = u'\\\\?\\' + path ++ path = '\\\\?\\' + path + return path +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event.py +index 88eef21e6..95a5416c3 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event.py +@@ -48,7 +48,7 @@ in the child processes. + """ + + try: +- import trace_event_impl ++ from . 
import trace_event_impl + except ImportError: + trace_event_impl = None + +@@ -88,7 +88,7 @@ if trace_event_impl: + trace_event_impl.trace_flush() + + def trace_begin(name, **kwargs): +- args_to_log = {key: repr(value) for key, value in kwargs.iteritems()} ++ args_to_log = {key: repr(value) for key, value in kwargs.items()} + trace_event_impl.add_trace_event("B", trace_time.Now(), "python", name, + args_to_log) + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/__init__.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/__init__.py +index d250e0312..1d3ba8611 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/__init__.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/__init__.py +@@ -1,7 +1,7 @@ + # Copyright 2016 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. +-from log import * +-from decorators import * +-from meta_class import * +-import multiprocessing_shim ++from .log import * ++from .decorators import * ++from .meta_class import * ++from . import multiprocessing_shim +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators.py +index dc753f1f6..7fbd8212b 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators.py +@@ -6,7 +6,7 @@ import inspect + import time + import functools + +-import log ++from . 
import log + from py_trace_event import trace_time + + +@@ -14,7 +14,7 @@ from py_trace_event import trace_time + def trace(name, **kwargs): + category = "python" + start = trace_time.Now() +- args_to_log = {key: repr(value) for key, value in kwargs.iteritems()} ++ args_to_log = {key: repr(value) for key, value in kwargs.items()} + log.add_trace_event("B", start, category, name, args_to_log) + try: + yield +@@ -42,7 +42,7 @@ def traced(*args): + default = None + return (name, arg_index, default) + +- args_to_log = map(arg_spec_tuple, arg_names) ++ args_to_log = list(map(arg_spec_tuple, arg_names)) + + @functools.wraps(func) + def traced_function(*args, **kwargs): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators_test.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators_test.py +index 434a3516f..1ec2bc2b4 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators_test.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/decorators_test.py +@@ -2,11 +2,11 @@ + # Copyright 2016 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. +-import decorators ++from . import decorators + import logging + import unittest + +-from trace_test import TraceTest ++from .trace_test import TraceTest + #from .trace_test import TraceTest + + def generator(): +@@ -41,21 +41,21 @@ class DecoratorTests(TraceTest): + events = res.findEventsOnThread(res.findThreadIds()[0]) + + # Sanity checks. 
+- self.assertEquals(2, len(events)) +- self.assertEquals(events[0]["name"], events[1]["name"]) ++ self.assertEqual(2, len(events)) ++ self.assertEqual(events[0]["name"], events[1]["name"]) + return events[1]["name"] + + + def test_func_names_work(self): + expected_method_name = __name__ + '.traced_func' +- self.assertEquals(expected_method_name, ++ self.assertEqual(expected_method_name, + self._get_decorated_method_name(traced_func)) + + def test_method_names_work(self): + ctt = ClassToTest() +- self.assertEquals('ClassToTest.method1', ++ self.assertEqual('ClassToTest.method1', + self._get_decorated_method_name(ctt.method1)) +- self.assertEquals('ClassToTest.method2', ++ self.assertEqual('ClassToTest.method2', + self._get_decorated_method_name(ctt.method2)) + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log.py +index 130d16838..969c03c17 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log.py +@@ -8,7 +8,7 @@ import sys + import time + import threading + import multiprocessing +-import multiprocessing_shim ++from . 
import multiprocessing_shim + + from py_trace_event.trace_event_impl import perfetto_trace_writer + from py_trace_event import trace_time +@@ -170,7 +170,7 @@ def _trace_enable(log_file=None, format=None): + log_file = open("%s.pb" % n, "ab", False) + else: + log_file = open("%s.json" % n, "ab", False) +- elif isinstance(log_file, basestring): ++ elif isinstance(log_file, str): + log_file = open("%s" % log_file, "ab", False) + elif not hasattr(log_file, 'fileno'): + raise TraceException( +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log_io_test.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log_io_test.py +index 6c03ea814..88c8f3810 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log_io_test.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/log_io_test.py +@@ -7,8 +7,8 @@ import os + import sys + import unittest + +-from log import * +-from parsed_trace_events import * ++from .log import * ++from .parsed_trace_events import * + from py_utils import tempfile_ext + + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/meta_class.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/meta_class.py +index 7aaa3faf6..012ace3e0 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/meta_class.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/meta_class.py +@@ -9,7 +9,7 @@ from py_trace_event.trace_event_impl import decorators + + class TracedMetaClass(type): + def __new__(cls, name, bases, attrs): +- for attr_name, attr_value in attrs.iteritems(): ++ for attr_name, attr_value in attrs.items(): + if (not 
attr_name.startswith('_') and + isinstance(attr_value, types.FunctionType)): + attrs[attr_name] = decorators.traced(attr_value) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/multiprocessing_shim.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/multiprocessing_shim.py +index c2295edaf..746981538 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/multiprocessing_shim.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/multiprocessing_shim.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + import multiprocessing +-import log ++from . import log + import time + + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/parsed_trace_events.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/parsed_trace_events.py +index fdc751454..7eb76a384 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/parsed_trace_events.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/parsed_trace_events.py +@@ -47,7 +47,7 @@ class ParsedTraceEvents(object): + events = events['traceEvents'] + + if not hasattr(events, '__iter__'): +- raise Exception, 'events must be iteraable.' 
++ raise Exception('events must be iteraable.') + self.events = events + self.pids = None + self.tids = None +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer.py +index 584352cfd..693ce6c6e 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer.py +@@ -7,7 +7,7 @@ + + import collections + +-import perfetto_proto_classes as proto ++from . import perfetto_proto_classes as proto + + CLOCK_BOOTTIME = 6 + CLOCK_TELEMETRY = 64 +@@ -128,7 +128,7 @@ def write_event(output, ph, category, name, ts, args, tid): + legacy_event.name_iid = _intern_event_name(name, packet, tid) + packet.track_event.legacy_event = legacy_event + +- for name, value in args.iteritems(): ++ for name, value in args.items(): + debug_annotation = proto.DebugAnnotation() + debug_annotation.name = name + if isinstance(value, int): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer_unittest.py +index 00dafa463..aea1d23bd 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/perfetto_trace_writer_unittest.py +@@ -4,7 +4,7 @@ + # found in the LICENSE file. 
+ + import unittest +-import StringIO ++import io + + from py_trace_event.trace_event_impl import perfetto_trace_writer + +@@ -19,7 +19,7 @@ class PerfettoTraceWriterTest(unittest.TestCase): + perfetto_trace_writer.reset_global_state() + + def testWriteThreadDescriptorEvent(self): +- result = StringIO.StringIO() ++ result = io.StringIO() + perfetto_trace_writer.write_thread_descriptor_event( + output=result, + pid=1, +@@ -33,7 +33,7 @@ class PerfettoTraceWriterTest(unittest.TestCase): + self.assertEqual(expected_output, result.getvalue()) + + def testWriteTwoEvents(self): +- result = StringIO.StringIO() ++ result = io.StringIO() + perfetto_trace_writer.write_thread_descriptor_event( + output=result, + pid=1, +@@ -59,7 +59,7 @@ class PerfettoTraceWriterTest(unittest.TestCase): + self.assertEqual(expected_output, result.getvalue()) + + def testWriteMetadata(self): +- result = StringIO.StringIO() ++ result = io.StringIO() + perfetto_trace_writer.write_metadata( + output=result, + benchmark_start_time_us=1556716807306000, +@@ -79,7 +79,7 @@ class PerfettoTraceWriterTest(unittest.TestCase): + self.assertEqual(expected_output, result.getvalue()) + + def testWriteArgs(self): +- result = StringIO.StringIO() ++ result = io.StringIO() + perfetto_trace_writer.write_thread_descriptor_event( + output=result, + pid=1, +@@ -105,7 +105,7 @@ class PerfettoTraceWriterTest(unittest.TestCase): + self.assertEqual(expected_output, result.getvalue()) + + def testWriteChromeMetadata(self): +- result = StringIO.StringIO() ++ result = io.StringIO() + perfetto_trace_writer.write_chrome_metadata( + output=result, + clock_domain='FOO', +@@ -116,7 +116,7 @@ class PerfettoTraceWriterTest(unittest.TestCase): + self.assertEqual(expected_output, result.getvalue()) + + def testWriteClockSnapshot(self): +- result = StringIO.StringIO() ++ result = io.StringIO() + perfetto_trace_writer.write_clock_snapshot( + output=result, + tid=1, +diff --git 
a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/trace_test.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/trace_test.py +index 1216037f1..462bd23bb 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/trace_test.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_impl/trace_test.py +@@ -6,8 +6,8 @@ import unittest + #from .log import * + #from .parsed_trace_events import * + +-from log import * +-from parsed_trace_events import * ++from .log import * ++from .parsed_trace_events import * + from py_utils import tempfile_ext + + class TraceTest(unittest.TestCase): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_unittest.py +index de7b59478..fb16e6a90 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_event_unittest.py +@@ -54,13 +54,13 @@ class TraceEventTests(unittest.TestCase): + with self._test_trace(): + with open(self._log_path, 'r') as f: + log_output = json.loads(f.read() + ']') +- self.assertEquals(len(log_output), 1) ++ self.assertEqual(len(log_output), 1) + self.assertTrue(trace_event.trace_is_enabled()) + log_output = log_output.pop() +- self.assertEquals(log_output['category'], 'process_argv') +- self.assertEquals(log_output['name'], 'process_argv') ++ self.assertEqual(log_output['category'], 'process_argv') ++ self.assertEqual(log_output['name'], 'process_argv') + self.assertTrue(log_output['args']['argv']) +- self.assertEquals(log_output['ph'], 'M') ++ self.assertEqual(log_output['ph'], 'M') + + def testDoubleEnable(self): + 
try: +@@ -81,7 +81,7 @@ class TraceEventTests(unittest.TestCase): + trace_event.trace_disable() + self.assertEqual( + multiprocessing.Process, _old_multiprocessing_process) +- self.assertEquals(len(json.loads(f.read() + ']')), 1) ++ self.assertEqual(len(json.loads(f.read() + ']')), 1) + self.assertFalse(trace_event.trace_is_enabled()) + + def testDoubleDisable(self): +@@ -93,28 +93,28 @@ class TraceEventTests(unittest.TestCase): + with self._test_trace(): + with open(self._log_path, 'r') as f: + trace_event.clock_sync('1') +- self.assertEquals(len(json.loads(f.read() + ']')), 1) ++ self.assertEqual(len(json.loads(f.read() + ']')), 1) + f.seek(0) + trace_event.trace_flush() +- self.assertEquals(len(json.loads(f.read() + ']')), 2) ++ self.assertEqual(len(json.loads(f.read() + ']')), 2) + + def testFlushNoChanges(self): + with self._test_trace(): + with open(self._log_path, 'r') as f: +- self.assertEquals(len(json.loads(f.read() + ']')),1) ++ self.assertEqual(len(json.loads(f.read() + ']')),1) + f.seek(0) + trace_event.trace_flush() +- self.assertEquals(len(json.loads(f.read() + ']')), 1) ++ self.assertEqual(len(json.loads(f.read() + ']')), 1) + + def testDoubleFlush(self): + with self._test_trace(): + with open(self._log_path, 'r') as f: + trace_event.clock_sync('1') +- self.assertEquals(len(json.loads(f.read() + ']')), 1) ++ self.assertEqual(len(json.loads(f.read() + ']')), 1) + f.seek(0) + trace_event.trace_flush() + trace_event.trace_flush() +- self.assertEquals(len(json.loads(f.read() + ']')), 2) ++ self.assertEqual(len(json.loads(f.read() + ']')), 2) + + def testTraceBegin(self): + with self._test_trace(): +@@ -122,17 +122,17 @@ class TraceEventTests(unittest.TestCase): + trace_event.trace_begin('test_event', this='that') + trace_event.trace_flush() + log_output = json.loads(f.read() + ']') +- self.assertEquals(len(log_output), 2) ++ self.assertEqual(len(log_output), 2) + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 
'process_argv') +- self.assertEquals(current_entry['name'], 'process_argv') ++ self.assertEqual(current_entry['category'], 'process_argv') ++ self.assertEqual(current_entry['name'], 'process_argv') + self.assertTrue( current_entry['args']['argv']) +- self.assertEquals( current_entry['ph'], 'M') ++ self.assertEqual( current_entry['ph'], 'M') + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'python') +- self.assertEquals(current_entry['name'], 'test_event') +- self.assertEquals(current_entry['args']['this'], '\'that\'') +- self.assertEquals(current_entry['ph'], 'B') ++ self.assertEqual(current_entry['category'], 'python') ++ self.assertEqual(current_entry['name'], 'test_event') ++ self.assertEqual(current_entry['args']['this'], '\'that\'') ++ self.assertEqual(current_entry['ph'], 'B') + + def testTraceEnd(self): + with self._test_trace(): +@@ -140,17 +140,17 @@ class TraceEventTests(unittest.TestCase): + trace_event.trace_end('test_event') + trace_event.trace_flush() + log_output = json.loads(f.read() + ']') +- self.assertEquals(len(log_output), 2) ++ self.assertEqual(len(log_output), 2) + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'process_argv') +- self.assertEquals(current_entry['name'], 'process_argv') ++ self.assertEqual(current_entry['category'], 'process_argv') ++ self.assertEqual(current_entry['name'], 'process_argv') + self.assertTrue(current_entry['args']['argv']) +- self.assertEquals(current_entry['ph'], 'M') ++ self.assertEqual(current_entry['ph'], 'M') + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'python') +- self.assertEquals(current_entry['name'], 'test_event') +- self.assertEquals(current_entry['args'], {}) +- self.assertEquals(current_entry['ph'], 'E') ++ self.assertEqual(current_entry['category'], 'python') ++ self.assertEqual(current_entry['name'], 'test_event') ++ self.assertEqual(current_entry['args'], {}) ++ 
self.assertEqual(current_entry['ph'], 'E') + + def testTrace(self): + with self._test_trace(): +@@ -159,22 +159,22 @@ class TraceEventTests(unittest.TestCase): + trace_event.trace_flush() + with open(self._log_path, 'r') as f: + log_output = json.loads(f.read() + ']') +- self.assertEquals(len(log_output), 3) ++ self.assertEqual(len(log_output), 3) + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'process_argv') +- self.assertEquals(current_entry['name'], 'process_argv') ++ self.assertEqual(current_entry['category'], 'process_argv') ++ self.assertEqual(current_entry['name'], 'process_argv') + self.assertTrue(current_entry['args']['argv']) +- self.assertEquals(current_entry['ph'], 'M') ++ self.assertEqual(current_entry['ph'], 'M') + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'python') +- self.assertEquals(current_entry['name'], 'test_event') +- self.assertEquals(current_entry['args']['this'], '\'that\'') +- self.assertEquals(current_entry['ph'], 'B') ++ self.assertEqual(current_entry['category'], 'python') ++ self.assertEqual(current_entry['name'], 'test_event') ++ self.assertEqual(current_entry['args']['this'], '\'that\'') ++ self.assertEqual(current_entry['ph'], 'B') + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'python') +- self.assertEquals(current_entry['name'], 'test_event') +- self.assertEquals(current_entry['args'], {}) +- self.assertEquals(current_entry['ph'], 'E') ++ self.assertEqual(current_entry['category'], 'python') ++ self.assertEqual(current_entry['name'], 'test_event') ++ self.assertEqual(current_entry['args'], {}) ++ self.assertEqual(current_entry['ph'], 'E') + + def testTracedDecorator(self): + @trace_event.traced("this") +@@ -186,23 +186,23 @@ class TraceEventTests(unittest.TestCase): + trace_event.trace_flush() + with open(self._log_path, 'r') as f: + log_output = json.loads(f.read() + ']') +- self.assertEquals(len(log_output), 3) ++ 
self.assertEqual(len(log_output), 3) + expected_name = __name__ + '.test_decorator' + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'process_argv') +- self.assertEquals(current_entry['name'], 'process_argv') ++ self.assertEqual(current_entry['category'], 'process_argv') ++ self.assertEqual(current_entry['name'], 'process_argv') + self.assertTrue(current_entry['args']['argv']) +- self.assertEquals(current_entry['ph'], 'M') ++ self.assertEqual(current_entry['ph'], 'M') + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'python') +- self.assertEquals(current_entry['name'], expected_name) +- self.assertEquals(current_entry['args']['this'], '\'that\'') +- self.assertEquals(current_entry['ph'], 'B') ++ self.assertEqual(current_entry['category'], 'python') ++ self.assertEqual(current_entry['name'], expected_name) ++ self.assertEqual(current_entry['args']['this'], '\'that\'') ++ self.assertEqual(current_entry['ph'], 'B') + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'python') +- self.assertEquals(current_entry['name'], expected_name) +- self.assertEquals(current_entry['args'], {}) +- self.assertEquals(current_entry['ph'], 'E') ++ self.assertEqual(current_entry['category'], 'python') ++ self.assertEqual(current_entry['name'], expected_name) ++ self.assertEqual(current_entry['args'], {}) ++ self.assertEqual(current_entry['ph'], 'E') + + def testClockSyncWithTs(self): + with self._test_trace(): +@@ -210,17 +210,17 @@ class TraceEventTests(unittest.TestCase): + trace_event.clock_sync('id', issue_ts=trace_time.Now()) + trace_event.trace_flush() + log_output = json.loads(f.read() + ']') +- self.assertEquals(len(log_output), 2) ++ self.assertEqual(len(log_output), 2) + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'process_argv') +- self.assertEquals(current_entry['name'], 'process_argv') ++ self.assertEqual(current_entry['category'], 
'process_argv') ++ self.assertEqual(current_entry['name'], 'process_argv') + self.assertTrue(current_entry['args']['argv']) +- self.assertEquals(current_entry['ph'], 'M') ++ self.assertEqual(current_entry['ph'], 'M') + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'python') +- self.assertEquals(current_entry['name'], 'clock_sync') ++ self.assertEqual(current_entry['category'], 'python') ++ self.assertEqual(current_entry['name'], 'clock_sync') + self.assertTrue(current_entry['args']['issue_ts']) +- self.assertEquals(current_entry['ph'], 'c') ++ self.assertEqual(current_entry['ph'], 'c') + + def testClockSyncWithoutTs(self): + with self._test_trace(): +@@ -228,17 +228,17 @@ class TraceEventTests(unittest.TestCase): + trace_event.clock_sync('id') + trace_event.trace_flush() + log_output = json.loads(f.read() + ']') +- self.assertEquals(len(log_output), 2) ++ self.assertEqual(len(log_output), 2) + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'process_argv') +- self.assertEquals(current_entry['name'], 'process_argv') ++ self.assertEqual(current_entry['category'], 'process_argv') ++ self.assertEqual(current_entry['name'], 'process_argv') + self.assertTrue(current_entry['args']['argv']) +- self.assertEquals(current_entry['ph'], 'M') ++ self.assertEqual(current_entry['ph'], 'M') + current_entry = log_output.pop(0) +- self.assertEquals(current_entry['category'], 'python') +- self.assertEquals(current_entry['name'], 'clock_sync') ++ self.assertEqual(current_entry['category'], 'python') ++ self.assertEqual(current_entry['name'], 'clock_sync') + self.assertFalse(current_entry['args'].get('issue_ts')) +- self.assertEquals(current_entry['ph'], 'c') ++ self.assertEqual(current_entry['ph'], 'c') + + def testTime(self): + actual_diff = [] +@@ -258,20 +258,20 @@ class TraceEventTests(unittest.TestCase): + trace_event.trace_flush() + with open(self._log_path, 'r') as f: + log_output = json.loads(f.read() + ']') 
+- self.assertEquals(len(log_output), 3) ++ self.assertEqual(len(log_output), 3) + meta_data = log_output[0] + open_data = log_output[1] + close_data = log_output[2] +- self.assertEquals(meta_data['category'], 'process_argv') +- self.assertEquals(meta_data['name'], 'process_argv') ++ self.assertEqual(meta_data['category'], 'process_argv') ++ self.assertEqual(meta_data['name'], 'process_argv') + self.assertTrue(meta_data['args']['argv']) +- self.assertEquals(meta_data['ph'], 'M') +- self.assertEquals(open_data['category'], 'python') +- self.assertEquals(open_data['name'], 'test') +- self.assertEquals(open_data['ph'], 'B') +- self.assertEquals(close_data['category'], 'python') +- self.assertEquals(close_data['name'], 'test') +- self.assertEquals(close_data['ph'], 'E') ++ self.assertEqual(meta_data['ph'], 'M') ++ self.assertEqual(open_data['category'], 'python') ++ self.assertEqual(open_data['name'], 'test') ++ self.assertEqual(open_data['ph'], 'B') ++ self.assertEqual(close_data['category'], 'python') ++ self.assertEqual(close_data['name'], 'test') ++ self.assertEqual(close_data['ph'], 'E') + event_time_diff = close_data['ts'] - open_data['ts'] + recorded_time_diff = (end_ts - start_ts) * 1000000 + self.assertLess(math.fabs(event_time_diff - recorded_time_diff), 1000) +@@ -285,30 +285,30 @@ class TraceEventTests(unittest.TestCase): + trace_event.trace_flush() + with open(self._log_path, 'r') as f: + log_output = json.loads(f.read() + ']') +- self.assertEquals(len(log_output), 5) ++ self.assertEqual(len(log_output), 5) + meta_data = log_output[0] + one_open = log_output[1] + two_open = log_output[2] + two_close = log_output[3] + one_close = log_output[4] +- self.assertEquals(meta_data['category'], 'process_argv') +- self.assertEquals(meta_data['name'], 'process_argv') ++ self.assertEqual(meta_data['category'], 'process_argv') ++ self.assertEqual(meta_data['name'], 'process_argv') + self.assertTrue(meta_data['args']['argv']) +- self.assertEquals(meta_data['ph'], 'M') 
+- +- self.assertEquals(one_open['category'], 'python') +- self.assertEquals(one_open['name'], 'one') +- self.assertEquals(one_open['ph'], 'B') +- self.assertEquals(one_close['category'], 'python') +- self.assertEquals(one_close['name'], 'one') +- self.assertEquals(one_close['ph'], 'E') +- +- self.assertEquals(two_open['category'], 'python') +- self.assertEquals(two_open['name'], 'two') +- self.assertEquals(two_open['ph'], 'B') +- self.assertEquals(two_close['category'], 'python') +- self.assertEquals(two_close['name'], 'two') +- self.assertEquals(two_close['ph'], 'E') ++ self.assertEqual(meta_data['ph'], 'M') ++ ++ self.assertEqual(one_open['category'], 'python') ++ self.assertEqual(one_open['name'], 'one') ++ self.assertEqual(one_open['ph'], 'B') ++ self.assertEqual(one_close['category'], 'python') ++ self.assertEqual(one_close['name'], 'one') ++ self.assertEqual(one_close['ph'], 'E') ++ ++ self.assertEqual(two_open['category'], 'python') ++ self.assertEqual(two_open['name'], 'two') ++ self.assertEqual(two_open['ph'], 'B') ++ self.assertEqual(two_close['category'], 'python') ++ self.assertEqual(two_close['name'], 'two') ++ self.assertEqual(two_close['ph'], 'E') + + self.assertLessEqual(one_open['ts'], two_open['ts']) + self.assertGreaterEqual(one_close['ts'], two_close['ts']) +@@ -322,30 +322,30 @@ class TraceEventTests(unittest.TestCase): + trace_event.trace_flush() + with open(self._log_path, 'r') as f: + log_output = json.loads(f.read() + ']') +- self.assertEquals(len(log_output), 5) ++ self.assertEqual(len(log_output), 5) + meta_data = log_output[0] + one_open = log_output[1] + two_open = log_output[2] + two_close = log_output[4] + one_close = log_output[3] +- self.assertEquals(meta_data['category'], 'process_argv') +- self.assertEquals(meta_data['name'], 'process_argv') ++ self.assertEqual(meta_data['category'], 'process_argv') ++ self.assertEqual(meta_data['name'], 'process_argv') + self.assertTrue(meta_data['args']['argv']) +- 
self.assertEquals(meta_data['ph'], 'M') +- +- self.assertEquals(one_open['category'], 'python') +- self.assertEquals(one_open['name'], 'one') +- self.assertEquals(one_open['ph'], 'B') +- self.assertEquals(one_close['category'], 'python') +- self.assertEquals(one_close['name'], 'one') +- self.assertEquals(one_close['ph'], 'E') +- +- self.assertEquals(two_open['category'], 'python') +- self.assertEquals(two_open['name'], 'two') +- self.assertEquals(two_open['ph'], 'B') +- self.assertEquals(two_close['category'], 'python') +- self.assertEquals(two_close['name'], 'two') +- self.assertEquals(two_close['ph'], 'E') ++ self.assertEqual(meta_data['ph'], 'M') ++ ++ self.assertEqual(one_open['category'], 'python') ++ self.assertEqual(one_open['name'], 'one') ++ self.assertEqual(one_open['ph'], 'B') ++ self.assertEqual(one_close['category'], 'python') ++ self.assertEqual(one_close['name'], 'one') ++ self.assertEqual(one_close['ph'], 'E') ++ ++ self.assertEqual(two_open['category'], 'python') ++ self.assertEqual(two_open['name'], 'two') ++ self.assertEqual(two_open['ph'], 'B') ++ self.assertEqual(two_close['category'], 'python') ++ self.assertEqual(two_close['name'], 'two') ++ self.assertEqual(two_close['ph'], 'E') + + self.assertLessEqual(one_open['ts'], two_open['ts']) + self.assertLessEqual(one_close['ts'], two_close['ts']) +@@ -367,32 +367,32 @@ class TraceEventTests(unittest.TestCase): + trace_event.trace_flush() + with open(self._log_path, 'r') as f: + log_output = json.loads(f.read() + ']') +- self.assertEquals(len(log_output), 5) ++ self.assertEqual(len(log_output), 5) + meta_data = log_output[0] + parent_open = log_output[1] + child_open = log_output[2] + child_close = log_output[3] + parent_close = log_output[4] +- self.assertEquals(meta_data['category'], 'process_argv') +- self.assertEquals(meta_data['name'], 'process_argv') ++ self.assertEqual(meta_data['category'], 'process_argv') ++ self.assertEqual(meta_data['name'], 'process_argv') + 
self.assertTrue(meta_data['args']['argv']) +- self.assertEquals(meta_data['ph'], 'M') ++ self.assertEqual(meta_data['ph'], 'M') + +- self.assertEquals(parent_open['category'], 'python') +- self.assertEquals(parent_open['name'], 'parent_event') +- self.assertEquals(parent_open['ph'], 'B') ++ self.assertEqual(parent_open['category'], 'python') ++ self.assertEqual(parent_open['name'], 'parent_event') ++ self.assertEqual(parent_open['ph'], 'B') + +- self.assertEquals(child_open['category'], 'python') +- self.assertEquals(child_open['name'], 'child_event') +- self.assertEquals(child_open['ph'], 'B') ++ self.assertEqual(child_open['category'], 'python') ++ self.assertEqual(child_open['name'], 'child_event') ++ self.assertEqual(child_open['ph'], 'B') + +- self.assertEquals(child_close['category'], 'python') +- self.assertEquals(child_close['name'], 'child_event') +- self.assertEquals(child_close['ph'], 'E') ++ self.assertEqual(child_close['category'], 'python') ++ self.assertEqual(child_close['name'], 'child_event') ++ self.assertEqual(child_close['ph'], 'E') + +- self.assertEquals(parent_close['category'], 'python') +- self.assertEquals(parent_close['name'], 'parent_event') +- self.assertEquals(parent_close['ph'], 'E') ++ self.assertEqual(parent_close['category'], 'python') ++ self.assertEqual(parent_close['name'], 'parent_event') ++ self.assertEqual(parent_close['ph'], 'E') + + @unittest.skipIf(sys.platform == 'win32', 'crbug.com/945819') + def testTracingControlDisabledInChildButNotInParent(self): +@@ -424,34 +424,34 @@ class TraceEventTests(unittest.TestCase): + trace_event.trace_flush() + with open(self._log_path, 'r') as f: + log_output = json.loads(f.read() + ']') +- self.assertEquals(len(log_output), 3) ++ self.assertEqual(len(log_output), 3) + meta_data = log_output[0] + parent_open = log_output[1] + parent_close = log_output[2] +- self.assertEquals(parent_open['category'], 'python') +- self.assertEquals(parent_open['name'], 'parent') +- 
self.assertEquals(parent_open['ph'], 'B') +- self.assertEquals(parent_close['category'], 'python') +- self.assertEquals(parent_close['name'], 'parent') +- self.assertEquals(parent_close['ph'], 'E') ++ self.assertEqual(parent_open['category'], 'python') ++ self.assertEqual(parent_open['name'], 'parent') ++ self.assertEqual(parent_open['ph'], 'B') ++ self.assertEqual(parent_close['category'], 'python') ++ self.assertEqual(parent_close['name'], 'parent') ++ self.assertEqual(parent_close['ph'], 'E') + + def testFormatJson(self): + with self._test_trace(format=trace_event.JSON): + trace_event.trace_flush() + with open(self._log_path, 'r') as f: + log_output = json.loads(f.read() + ']') +- self.assertEquals(len(log_output), 1) +- self.assertEquals(log_output[0]['ph'], 'M') ++ self.assertEqual(len(log_output), 1) ++ self.assertEqual(log_output[0]['ph'], 'M') + + def testFormatJsonWithMetadata(self): + with self._test_trace(format=trace_event.JSON_WITH_METADATA): + trace_event.trace_disable() + with open(self._log_path, 'r') as f: + log_output = json.load(f) +- self.assertEquals(len(log_output), 2) ++ self.assertEqual(len(log_output), 2) + events = log_output['traceEvents'] +- self.assertEquals(len(events), 1) +- self.assertEquals(events[0]['ph'], 'M') ++ self.assertEqual(len(events), 1) ++ self.assertEqual(events[0]['ph'], 'M') + + def testFormatProtobuf(self): + with self._test_trace(format=trace_event.PROTOBUF): +@@ -473,16 +473,16 @@ class TraceEventTests(unittest.TestCase): + trace_event.trace_disable() + with open(self._log_path, 'r') as f: + log_output = json.load(f) +- self.assertEquals(len(log_output), 2) ++ self.assertEqual(len(log_output), 2) + telemetry_metadata = log_output['metadata']['telemetry'] +- self.assertEquals(len(telemetry_metadata), 7) +- self.assertEquals(telemetry_metadata['benchmarkStart'], 1) +- self.assertEquals(telemetry_metadata['traceStart'], 2) +- self.assertEquals(telemetry_metadata['benchmarks'], ['benchmark']) +- 
self.assertEquals(telemetry_metadata['benchmarkDescriptions'], ['desc']) +- self.assertEquals(telemetry_metadata['stories'], ['story']) +- self.assertEquals(telemetry_metadata['storyTags'], ['tag1', 'tag2']) +- self.assertEquals(telemetry_metadata['storysetRepeats'], [0]) ++ self.assertEqual(len(telemetry_metadata), 7) ++ self.assertEqual(telemetry_metadata['benchmarkStart'], 1) ++ self.assertEqual(telemetry_metadata['traceStart'], 2) ++ self.assertEqual(telemetry_metadata['benchmarks'], ['benchmark']) ++ self.assertEqual(telemetry_metadata['benchmarkDescriptions'], ['desc']) ++ self.assertEqual(telemetry_metadata['stories'], ['story']) ++ self.assertEqual(telemetry_metadata['storyTags'], ['tag1', 'tag2']) ++ self.assertEqual(telemetry_metadata['storysetRepeats'], [0]) + + def testAddMetadataProtobuf(self): + with self._test_trace(format=trace_event.PROTOBUF): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_time_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_time_unittest.py +index bae7ea81f..cbf982b86 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_time_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_trace_event/py_trace_event/trace_time_unittest.py +@@ -98,21 +98,21 @@ class TimerTest(unittest.TestCase): + + # Linux tests. 
+ def testGetClockGetTimeClockNumber_linux(self): +- self.assertEquals(trace_time.GetClockGetTimeClockNumber('linux'), 1) ++ self.assertEqual(trace_time.GetClockGetTimeClockNumber('linux'), 1) + + def testGetClockGetTimeClockNumber_freebsd(self): +- self.assertEquals(trace_time.GetClockGetTimeClockNumber('freebsd'), 4) ++ self.assertEqual(trace_time.GetClockGetTimeClockNumber('freebsd'), 4) + + def testGetClockGetTimeClockNumber_bsd(self): +- self.assertEquals(trace_time.GetClockGetTimeClockNumber('bsd'), 3) ++ self.assertEqual(trace_time.GetClockGetTimeClockNumber('bsd'), 3) + + def testGetClockGetTimeClockNumber_sunos(self): +- self.assertEquals(trace_time.GetClockGetTimeClockNumber('sunos5'), 4) ++ self.assertEqual(trace_time.GetClockGetTimeClockNumber('sunos5'), 4) + + # Smoke Test. + def testMonotonic(self): + time_one = trace_time.Now() +- for _ in xrange(1000): ++ for _ in range(1000): + time_two = trace_time.Now() + self.assertLessEqual(time_one, time_two) + time_one = time_two +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/__init__.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/__init__.py +index 0d7b052af..334513b6b 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/__init__.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/__init__.py +@@ -4,7 +4,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import print_function ++ + + import functools + import inspect +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/camel_case.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/camel_case.py +index dbebb2273..8f7ceaf66 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/camel_case.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/camel_case.py +@@ -2,9 +2,9 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + import re + import six + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/discover_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/discover_unittest.py +index bdc50b2ff..686c7e69f 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/discover_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/discover_unittest.py +@@ -1,9 +1,9 @@ + # Copyright 2013 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + + import os + import unittest +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/exc_util_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/exc_util_unittest.py +index 31e3b57a8..3688b799e 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/exc_util_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/exc_util_unittest.py +@@ -55,7 +55,7 @@ class FaultyClient(object): + + class ReraiseTests(unittest.TestCase): + def assertLogMatches(self, pattern): +- self.assertRegexpMatches( ++ self.assertRegex( + sys.stderr.getvalue(), pattern) # pylint: disable=no-member + + def assertLogNotMatches(self, pattern): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/expectations_parser.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/expectations_parser.py +index 534b35263..3f7127a38 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/expectations_parser.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/expectations_parser.py +@@ -2,9 +2,9 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + import re + import six + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/expectations_parser_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/expectations_parser_unittest.py +index 82debc5dc..398b52291 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/expectations_parser_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/expectations_parser_unittest.py +@@ -3,9 +3,9 @@ + # found in the LICENSE file. + + +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + + import unittest + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/lock_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/lock_unittest.py +index 2ba288bd1..05e109b74 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/lock_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/lock_unittest.py +@@ -2,9 +2,9 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + + import multiprocessing + import os +@@ -102,7 +102,7 @@ class FileLockTest(unittest.TestCase): + + # temp_write_file should contains 10 copy of temp_file_path's content. 
+ with open(temp_write_file, 'r') as f: +- self.assertEquals('0123456789'*10, f.read()) ++ self.assertEqual('0123456789'*10, f.read()) + finally: + os.remove(temp_write_file) + +@@ -119,7 +119,7 @@ class FileLockTest(unittest.TestCase): + p.start() + p.join() + with open(temp_status_file, 'r') as f: +- self.assertEquals('LockException raised', f.read()) ++ self.assertEqual('LockException raised', f.read()) + finally: + os.remove(temp_status_file) + +@@ -137,7 +137,7 @@ class FileLockTest(unittest.TestCase): + p.start() + p.join() + with open(temp_status_file, 'r') as f: +- self.assertEquals('LockException was not raised', f.read()) ++ self.assertEqual('LockException was not raised', f.read()) + finally: + os.remove(temp_status_file) + +@@ -156,7 +156,7 @@ class FileLockTest(unittest.TestCase): + p.start() + p.join() + with open(temp_status_file, 'r') as f: +- self.assertEquals('LockException raised', f.read()) ++ self.assertEqual('LockException raised', f.read()) + + # Accessing self.temp_file_path here should not raise exception. + p = multiprocessing.Process( +@@ -165,6 +165,6 @@ class FileLockTest(unittest.TestCase): + p.start() + p.join() + with open(temp_status_file, 'r') as f: +- self.assertEquals('LockException was not raised', f.read()) ++ self.assertEqual('LockException was not raised', f.read()) + finally: + os.remove(temp_status_file) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/annotated_symbol/base_symbol.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/annotated_symbol/base_symbol.py +index bdaec61b3..4eaa0528f 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/annotated_symbol/base_symbol.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/annotated_symbol/base_symbol.py +@@ -2,9 +2,9 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + + from six.moves import range # pylint: disable=redefined-builtin + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/annotated_symbol/import_statement.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/annotated_symbol/import_statement.py +index 54a3935ca..02e70f814 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/annotated_symbol/import_statement.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/annotated_symbol/import_statement.py +@@ -2,9 +2,9 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + + import keyword + import symbol +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/annotated_symbol/reference.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/annotated_symbol/reference.py +index 493176e65..15ce2e6a1 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/annotated_symbol/reference.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/annotated_symbol/reference.py +@@ -2,9 +2,9 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + + import symbol + import token +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/offset_token.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/offset_token.py +index deca08587..2cd695900 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/offset_token.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor/offset_token.py +@@ -1,11 +1,11 @@ +-# Lint as: python2, python3 ++# Lint as: python3 + # Copyright 2015 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + import collections + import itertools + import token +@@ -17,7 +17,7 @@ def _Pairwise(iterable): + """s -> (None, s0), (s0, s1), (s1, s2), (s2, s3), ...""" + a, b = itertools.tee(iterable) + a = itertools.chain((None,), a) +- return zip(a, b) ++ return list(zip(a, b)) + + + class OffsetToken(object): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/snippet.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/snippet.py +index 7056abf74..f4b27c84a 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/snippet.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/snippet.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import print_function ++ + + import parser + import symbol +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor_util/move.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor_util/move.py +index 6d0a7cb81..bfc0e6ef2 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor_util/move.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/refactor_util/move.py +@@ -2,7 +2,7 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import print_function ++ + + import functools + import os +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/retry_util.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/retry_util.py +index a11bd806d..06f39f6a0 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/retry_util.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/retry_util.py +@@ -1,9 +1,9 @@ + # Copyright 2018 The Chromium Authors. All rights reserved. + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + import functools + import logging + import time +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/shell_util.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/shell_util.py +index 6af7f8e28..3d02465fb 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/shell_util.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/shell_util.py +@@ -4,7 +4,7 @@ + # + # Shell scripting helpers (created for Telemetry dependency roll scripts). 
+ +-from __future__ import print_function ++ + + import os as _os + import shutil as _shutil +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/slots_metaclass_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/slots_metaclass_unittest.py +index 702371a79..aaa0950ba 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/slots_metaclass_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/slots_metaclass_unittest.py +@@ -2,9 +2,9 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + + import unittest + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/tempfile_ext_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/tempfile_ext_unittest.py +index 76a0efd97..d25f22154 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/tempfile_ext_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/tempfile_ext_unittest.py +@@ -37,7 +37,7 @@ class NamedTemporaryDirectoryTest(fake_filesystem_unittest.TestCase): + test_dir = '/baz' + self.fs.CreateDirectory(test_dir) + with tempfile_ext.NamedTemporaryDirectory(dir=test_dir) as d: +- self.assertEquals(test_dir, os.path.dirname(d)) ++ self.assertEqual(test_dir, os.path.dirname(d)) + + + class TemporaryFilesTest(fake_filesystem_unittest.TestCase): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/ts_proxy_server_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/ts_proxy_server_unittest.py +index 4bb75c8b1..1913f312a 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/ts_proxy_server_unittest.py ++++ 
b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/ts_proxy_server_unittest.py +@@ -8,15 +8,15 @@ from py_utils import ts_proxy_server + + class TsProxyServerTest(unittest.TestCase): + def testParseTsProxyPort(self): +- self.assertEquals( ++ self.assertEqual( + ts_proxy_server.ParseTsProxyPortFromOutput( + 'Started Socks5 proxy server on 127.0.0.1:54430 \n'), + 54430) +- self.assertEquals( ++ self.assertEqual( + ts_proxy_server.ParseTsProxyPortFromOutput( + 'Started Socks5 proxy server on foo.bar.com:430 \n'), + 430) +- self.assertEquals( ++ self.assertEqual( + ts_proxy_server.ParseTsProxyPortFromOutput( + 'Failed to start sock5 proxy.'), + None) +@@ -44,13 +44,13 @@ class TsProxyServerTest(unittest.TestCase): + server.UpdateTrafficSettings(download_bandwidth_kbps=5000) + server.UpdateTrafficSettings(upload_bandwidth_kbps=2000) + +- self.assertEquals(server._rtt, 100) +- self.assertEquals(server._inbkps, 5000) +- self.assertEquals(server._outkbps, 2000) ++ self.assertEqual(server._rtt, 100) ++ self.assertEqual(server._inbkps, 5000) ++ self.assertEqual(server._outkbps, 2000) + + server.UpdateTrafficSettings( + round_trip_latency_ms=200, download_bandwidth_kbps=500, + upload_bandwidth_kbps=2000) +- self.assertEquals(server._rtt, 200) +- self.assertEquals(server._inbkps, 500) +- self.assertEquals(server._outkbps, 2000) ++ self.assertEqual(server._rtt, 200) ++ self.assertEqual(server._inbkps, 500) ++ self.assertEqual(server._outkbps, 2000) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/webpagereplay_go_server.py b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/webpagereplay_go_server.py +index 95e44954c..bdbf0347b 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/webpagereplay_go_server.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_utils/py_utils/webpagereplay_go_server.py +@@ -11,7 +11,7 @@ import signal + import subprocess + import sys + import 
tempfile +-import urllib ++import urllib.request, urllib.parse, urllib.error + + import py_utils + from py_utils import atexit_with_log +@@ -136,7 +136,7 @@ + cur_cwd = os.getcwd() + os.chdir(go_folder) + try: +- print subprocess.check_output(['go', 'build', os.path.join(go_folder, 'wpr.go')]) ++ print(subprocess.check_output(['go', 'build', os.path.join(go_folder, 'wpr.go')])) + except subprocess.CalledProcessError: + exit(1) + os.chdir(cur_cwd) +@@ -392,7 +392,7 @@ + if logging.getLogger('').isEnabledFor(log_level): + logging.log(log_level, output) + else: +- print output ++ print(output) + + os.remove(self._temp_log_file_path) + self._temp_log_file_path = None +@@ -420,7 +420,7 @@ + """ + url = '%s://%s:%s/%s' % ( + protocol, self._replay_host, self._started_ports[protocol], url_path) +- return urllib.urlopen(url, proxies={}) ++ return urllib.request.build_opener(urllib.request.ProxyHandler({})).open(url) + + def _ResetInterruptHandler(): + """Reset the interrupt handler back to the default. +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/fake_fs.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/fake_fs.py +index 40b01bb5f..e3857e183 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/fake_fs.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/fake_fs.py +@@ -2,9 +2,9 @@ + # Use of this source code is governed by a BSD-style license that can be 
+ +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + + import codecs + import collections +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/fake_fs_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/fake_fs_unittest.py +index 7e225f595..4efe00d9d 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/fake_fs_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/fake_fs_unittest.py +@@ -15,7 +15,7 @@ class FakeFSUnittest(unittest.TestCase): + fs.AddFile('/blah/x', 'foobar') + with fs: + assert os.path.exists(os.path.normpath('/blah/x')) +- self.assertEquals( ++ self.assertEqual( + 'foobar', + open(os.path.normpath('/blah/x'), 'r').read()) + +@@ -24,7 +24,7 @@ class FakeFSUnittest(unittest.TestCase): + fs.AddFile('/blah/x', 'foobar') + with fs: + with open(os.path.normpath('/blah/x'), 'r') as f: +- self.assertEquals('foobar', f.read()) ++ self.assertEqual('foobar', f.read()) + + def testWalk(self): + fs = fake_fs.FakeFS() +@@ -35,18 +35,18 @@ class FakeFSUnittest(unittest.TestCase): + with fs: + gen = os.walk(os.path.normpath('/')) + r = next(gen) +- self.assertEquals((os.path.normpath('/'), ['x'], ['a.txt']), r) ++ self.assertEqual((os.path.normpath('/'), ['x'], ['a.txt']), r) + + r = next(gen) +- self.assertEquals((os.path.normpath('/x'), ['w', 'w2'], ['y.txt']), r) ++ self.assertEqual((os.path.normpath('/x'), ['w', 'w2'], ['y.txt']), r) + + r = next(gen) +- self.assertEquals((os.path.normpath('/x/w'), [], ['z.txt']), r) ++ self.assertEqual((os.path.normpath('/x/w'), [], ['z.txt']), r) + + r = next(gen) +- self.assertEquals((os.path.normpath('/x/w2'), ['w3'], []), r) ++ self.assertEqual((os.path.normpath('/x/w2'), ['w3'], []), r) + + r = next(gen) +- self.assertEquals((os.path.normpath('/x/w2/w3'), [], ['z3.txt']), r) ++ 
self.assertEqual((os.path.normpath('/x/w2/w3'), [], ['z3.txt']), r) + +- self.assertRaises(StopIteration, gen.next) ++ self.assertRaises(StopIteration, gen.__next__) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/generate.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/generate.py +index 8af373102..53b1347d0 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/generate.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/generate.py +@@ -2,9 +2,9 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + + import os + import subprocess +@@ -165,8 +165,8 @@ def GenerateJSToFile(f, + sln = '.'.join(parts[:2]) + + # Output +- print(('%i\t%s\t%s\t%s\t%s' % +- (len(js), min_js_size, module.name, tln, sln))) ++ print('%i\t%s\t%s\t%s\t%s' % ++ (len(js), min_js_size, module.name, tln, sln)) + sys.stdout.flush() + + +@@ -208,7 +208,7 @@ def _MinifyCSS(css_text): + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) +- res = p.communicate(input=css_text) ++ res = p.communicate(input=css_text.encode()) + errorcode = p.wait() + if errorcode != 0: + sys.stderr.write('rCSSmin exited with error code %d' % errorcode) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/html_module_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/html_module_unittest.py +index e8438f4db..fbce2c726 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/html_module_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/html_module_unittest.py +@@ -2,9 +2,9 @@ + # Use of this source code is governed by a 
BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + + import os + import unittest +@@ -56,7 +56,7 @@ class FakeLoader(object): + return None + + # Sort by length. Longest match wins. +- candidate_paths.sort(lambda x, y: len(x) - len(y)) ++ candidate_paths.sort(key=lambda x: len(x)) + longest_candidate = candidate_paths[-1] + + return ResourceWithFakeContents( +@@ -91,8 +91,8 @@ class ParseTests(unittest.TestCase): + '/tmp/a/b/', + is_component=False, + parser_results=parse_results) +- self.assertEquals([], metadata.dependent_module_names) +- self.assertEquals( ++ self.assertEqual([], metadata.dependent_module_names) ++ self.assertEqual( + ['a/foo.js'], metadata.dependent_raw_script_relative_paths) + + def testExternalScriptReferenceToModuleOutsideScriptPath(self): +@@ -141,7 +141,7 @@ class ParseTests(unittest.TestCase): + '/tmp/a/b/', + is_component=False, + parser_results=parse_results) +- self.assertEquals(['a.foo'], metadata.dependent_module_names) ++ self.assertEqual(['a.foo'], metadata.dependent_module_names) + + def testStyleSheetImport(self): + parse_results = parse_html_deps.HTMLModuleParserResults(""" +@@ -157,8 +157,8 @@ class ParseTests(unittest.TestCase): + '/tmp/a/b/', + is_component=False, + parser_results=parse_results) +- self.assertEquals([], metadata.dependent_module_names) +- self.assertEquals(['a.foo'], metadata.style_sheet_names) ++ self.assertEqual([], metadata.dependent_module_names) ++ self.assertEqual(['a.foo'], metadata.style_sheet_names) + + def testUsingAbsoluteHref(self): + parse_results = parse_html_deps.HTMLModuleParserResults(""" +@@ -175,7 +175,7 @@ class ParseTests(unittest.TestCase): + "/tmp/a/b/", + is_component=False, + parser_results=parse_results) +- self.assertEquals(['foo.js'], metadata.dependent_raw_script_relative_paths) ++ self.assertEqual(['foo.js'], 
metadata.dependent_raw_script_relative_paths) + + + class HTMLModuleTests(unittest.TestCase): +@@ -231,7 +231,7 @@ console.log('/raw/raw_script.js was written'); + + # Check load sequence names. + load_sequence_names = [x.name for x in load_sequence] +- self.assertEquals(['py_vulcanize', ++ self.assertEqual(['py_vulcanize', + 'widget', + 'a.b.start'], load_sequence_names) + +@@ -288,7 +288,7 @@ console.log('/raw/raw_script.js was written'); + is: "my-component" + }); + """.rstrip() +- self.assertEquals(expected_js, js) ++ self.assertEqual(expected_js, js) + + def testInlineStylesheetURLs(self): + file_contents = {} +@@ -309,7 +309,7 @@ console.log('/raw/raw_script.js was written'); + + computed_deps = [] + my_component.AppendDirectlyDependentFilenamesTo(computed_deps) +- self.assertEquals(set(computed_deps), ++ self.assertEqual(set(computed_deps), + set([os.path.normpath('/tmp/a/b/my_component.html'), + os.path.normpath('/tmp/a/something.jpg')])) + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/module.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/module.py +index bebf1b465..38b7cbc06 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/module.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/module.py +@@ -11,9 +11,9 @@ template objects, raw JavaScript, or other modules. + Other resources include HTML templates, raw JavaScript files, and stylesheets. 
+ """ + +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + + import codecs + import inspect +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/module_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/module_unittest.py +index ed92db353..7224c61a5 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/module_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/module_unittest.py +@@ -39,7 +39,7 @@ class ModuleIntegrationTests(unittest.TestCase): + loader = resource_loader.ResourceLoader(project) + x_module = loader.LoadModule('x') + +- self.assertEquals([loader.loaded_modules['y'], ++ self.assertEqual([loader.loaded_modules['y'], + loader.loaded_modules['z']], + x_module.dependent_modules) + +@@ -47,7 +47,7 @@ class ModuleIntegrationTests(unittest.TestCase): + load_sequence = [] + x_module.ComputeLoadSequenceRecursive(load_sequence, already_loaded_set) + +- self.assertEquals([loader.loaded_modules['z'], ++ self.assertEqual([loader.loaded_modules['z'], + loader.loaded_modules['y'], + x_module], + load_sequence) +@@ -68,7 +68,7 @@ class ModuleIntegrationTests(unittest.TestCase): + with fs: + my_module = loader.LoadModule(module_name='src.my_module') + dep_names = [x.name for x in my_module.dependent_modules] +- self.assertEquals(['py_vulcanize.foo'], dep_names) ++ self.assertEqual(['py_vulcanize.foo'], dep_names) + + def testDepsExceptionContext(self): + fs = fake_fs.FakeFS() +@@ -89,7 +89,7 @@ class ModuleIntegrationTests(unittest.TestCase): + assert False, 'Expected an exception' + except module.DepsException as e: + exc = e +- self.assertEquals( ++ self.assertEqual( + ['src.my_module', 'py_vulcanize.foo'], + exc.context) + +@@ -116,10 +116,10 @@ class ModuleIntegrationTests(unittest.TestCase): + loader = 
resource_loader.ResourceLoader(project) + with fs: + my_module = loader.LoadModule(module_name='z.foo') +- self.assertEquals(1, len(my_module.dependent_raw_scripts)) ++ self.assertEqual(1, len(my_module.dependent_raw_scripts)) + + dependent_filenames = my_module.GetAllDependentFilenamesRecursive() +- self.assertEquals( ++ self.assertEqual( + [ + os.path.normpath('/x/y/z/foo.html'), + os.path.normpath('/x/raw/bar.js'), +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/parse_html_deps.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/parse_html_deps.py +index 441edc5ed..47617d4c3 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/parse_html_deps.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/parse_html_deps.py +@@ -2,9 +2,9 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + + import os + import sys +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/parse_html_deps_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/parse_html_deps_unittest.py +index 2a30a29b0..56ded1eaa 100755 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/parse_html_deps_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/parse_html_deps_unittest.py +@@ -15,18 +15,18 @@ class ParseTests(unittest.TestCase): + def test_parse_empty(self): + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse('') +- self.assertEquals([], module.scripts_external) +- self.assertEquals([], module.inline_scripts) +- self.assertEquals([], module.stylesheets) +- self.assertEquals([], module.imports) ++ self.assertEqual([], 
module.scripts_external) ++ self.assertEqual([], module.inline_scripts) ++ self.assertEqual([], module.stylesheets) ++ self.assertEqual([], module.imports) + + def test_parse_none(self): + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(None) +- self.assertEquals([], module.scripts_external) +- self.assertEquals([], module.inline_scripts) +- self.assertEquals([], module.stylesheets) +- self.assertEquals([], module.imports) ++ self.assertEqual([], module.scripts_external) ++ self.assertEqual([], module.inline_scripts) ++ self.assertEqual([], module.stylesheets) ++ self.assertEqual([], module.imports) + + def test_parse_script_src_basic(self): + html = """ +@@ -40,10 +40,10 @@ class ParseTests(unittest.TestCase): + """ + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals(['polymer.min.js', 'foo.js'], module.scripts_external) +- self.assertEquals([], module.inline_scripts) +- self.assertEquals([], module.stylesheets) +- self.assertEquals([], module.imports) ++ self.assertEqual(['polymer.min.js', 'foo.js'], module.scripts_external) ++ self.assertEqual([], module.inline_scripts) ++ self.assertEqual([], module.stylesheets) ++ self.assertEqual([], module.imports) + self.assertNotIn( + 'DOCTYPE html', + module.html_contents_without_links_and_script) +@@ -59,10 +59,10 @@ class ParseTests(unittest.TestCase): + """ + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals([], module.scripts_external) +- self.assertEquals([], module.inline_scripts) +- self.assertEquals([], module.stylesheets) +- self.assertEquals(['x-foo.html'], module.imports) ++ self.assertEqual([], module.scripts_external) ++ self.assertEqual([], module.inline_scripts) ++ self.assertEqual([], module.stylesheets) ++ self.assertEqual(['x-foo.html'], module.imports) + + def test_parse_script_inline(self): + html = """ +@@ -76,18 +76,18 @@ class ParseTests(unittest.TestCase): + + parser = 
parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals([], module.scripts_external) +- self.assertEquals(1, len(module.inline_scripts)) +- self.assertEquals([], module.stylesheets) +- self.assertEquals([], module.imports) ++ self.assertEqual([], module.scripts_external) ++ self.assertEqual(1, len(module.inline_scripts)) ++ self.assertEqual([], module.stylesheets) ++ self.assertEqual([], module.imports) + + script0 = module.inline_scripts[0] + val = re.sub(r'\s+', '', script0.contents) + inner_script = """py_vulcanize.require("foo");py_vulcanize.require('bar');""" +- self.assertEquals(inner_script, val) ++ self.assertEqual(inner_script, val) + +- self.assertEquals(3, len(script0.open_tags)) +- self.assertEquals('polymer-element', script0.open_tags[2].tag) ++ self.assertEqual(3, len(script0.open_tags)) ++ self.assertEqual('polymer-element', script0.open_tags[2].tag) + + self.assertNotIn( + 'py_vulcanize.require("foo");', +@@ -104,19 +104,19 @@ class ParseTests(unittest.TestCase): + + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals(3, len(module.scripts)) +- self.assertEquals('window = {}', module.scripts[0].contents) +- self.assertEquals("foo.js",module.scripts[1].src) ++ self.assertEqual(3, len(module.scripts)) ++ self.assertEqual('window = {}', module.scripts[0].contents) ++ self.assertEqual("foo.js",module.scripts[1].src) + self.assertTrue(module.scripts[1].is_external) +- self.assertEquals('window = undefined', module.scripts[2].contents) +- self.assertEquals([], module.imports) ++ self.assertEqual('window = undefined', module.scripts[2].contents) ++ self.assertEqual([], module.imports) + + def test_parse_script_src_sripping(self): + html = """ + + """ + module = parse_html_deps.HTMLModuleParser().Parse(html) +- self.assertEquals('', ++ self.assertEqual('', + module.html_contents_without_links_and_script) + + def test_parse_link_rel_stylesheet(self): +@@ -127,10 +127,10 @@ class 
ParseTests(unittest.TestCase): + """ + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals([], module.scripts_external) +- self.assertEquals([], module.inline_scripts) +- self.assertEquals(['frameworkstyles.css'], module.stylesheets) +- self.assertEquals([], module.imports) ++ self.assertEqual([], module.scripts_external) ++ self.assertEqual([], module.inline_scripts) ++ self.assertEqual(['frameworkstyles.css'], module.stylesheets) ++ self.assertEqual([], module.imports) + + class Ctl(html_generation_controller.HTMLGenerationController): + +@@ -145,14 +145,14 @@ class ParseTests(unittest.TestCase): + + + """ +- self.assertEquals(ghtm, gen_html) ++ self.assertEqual(ghtm, gen_html) + + def test_parse_inline_style(self): + html = """""" + module = parse_html_deps.HTMLModuleParser().Parse(html) +- self.assertEquals(html, module.html_contents_without_links_and_script) ++ self.assertEqual(html, module.html_contents_without_links_and_script) + + class Ctl(html_generation_controller.HTMLGenerationController): + +@@ -165,7 +165,7 @@ class ParseTests(unittest.TestCase): + ghtm = """""" +- self.assertEquals(ghtm, gen_html) ++ self.assertEqual(ghtm, gen_html) + + def test_parse_style_import(self): + html = """ +@@ -187,40 +187,40 @@ class ParseTests(unittest.TestCase): + parser = parse_html_deps.HTMLModuleParser() + res = parser.Parse(orig_html) + html = res.html_contents_without_links_and_script +- self.assertEquals(html, orig_html) ++ self.assertEqual(html, orig_html) + + def test_html_contents_basic(self): + html = """
d""" + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals(html, module.html_contents_without_links_and_script) ++ self.assertEqual(html, module.html_contents_without_links_and_script) + + def test_html_contents_with_entity(self): + html = """""" + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals(u'\u2192', ++ self.assertEqual('\u2192', + module.html_contents_without_links_and_script) + + def test_html_content_with_charref(self): + html = """>""" + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals('>', ++ self.assertEqual('>', + module.html_contents_without_links_and_script) + + def test_html_content_start_end_br(self): + html = """
""" + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals('
', ++ self.assertEqual('
', + module.html_contents_without_links_and_script) + + def test_html_content_start_end_img(self): + html = """""" + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals('', ++ self.assertEqual('', + module.html_contents_without_links_and_script) + + def test_html_contents_with_link_stripping(self): +@@ -228,7 +228,7 @@ class ParseTests(unittest.TestCase): + """ + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals("""d""", ++ self.assertEqual("""d""", + module.html_contents_without_links_and_script.strip()) + + def test_html_contents_with_style_link_stripping(self): +@@ -236,7 +236,7 @@ class ParseTests(unittest.TestCase): + """ + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals("""d""", ++ self.assertEqual("""d""", + module.html_contents_without_links_and_script.strip()) + + def test_br_does_not_raise(self): +@@ -288,5 +288,5 @@ class ParseTests(unittest.TestCase): + html = """""" + parser = parse_html_deps.HTMLModuleParser() + module = parser.Parse(html) +- self.assertEquals(1, len(module.inline_scripts)) +- self.assertEquals('', module.inline_scripts[0].contents) ++ self.assertEqual(1, len(module.inline_scripts)) ++ self.assertEqual('', module.inline_scripts[0].contents) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/project.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/project.py +index 7a169882d..f090e4f29 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/project.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/project.py +@@ -2,9 +2,9 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. 
+ +-from __future__ import absolute_import +-from __future__ import division +-from __future__ import print_function ++ ++ ++ + import collections + import os + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/resource_loader.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/resource_loader.py +index 015adaa66..8d1356124 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/resource_loader.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/resource_loader.py +@@ -59,7 +59,7 @@ class ResourceLoader(object): + return None + + # Sort by length. Longest match wins. +- candidate_paths.sort(lambda x, y: len(x) - len(y)) ++ candidate_paths.sort(key=lambda x: len(x), reverse=True) + longest_candidate = candidate_paths[-1] + return resource_module.Resource(longest_candidate, absolute_path, binary) + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/resource_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/resource_unittest.py +index 4da23556f..3f0dedae3 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/resource_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/resource_unittest.py +@@ -12,6 +12,6 @@ class ResourceUnittest(unittest.TestCase): + + def testBasic(self): + r = resource.Resource('/a', '/a/b/c.js') +- self.assertEquals('b.c', r.name) +- self.assertEquals(os.path.join('b', 'c.js'), r.relative_path) +- self.assertEquals('b/c.js', r.unix_style_relative_path) ++ self.assertEqual('b.c', r.name) ++ self.assertEqual(os.path.join('b', 'c.js'), r.relative_path) ++ self.assertEqual('b/c.js', r.unix_style_relative_path) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/strip_js_comments_unittest.py 
b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/strip_js_comments_unittest.py +index 685cb824a..289e2b0b7 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/strip_js_comments_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/strip_js_comments_unittest.py +@@ -16,38 +16,38 @@ class JavaScriptStripCommentTests(unittest.TestCase): + """Test case for _strip_js_comments and _TokenizeJS.""" + + def test_strip_comments(self): +- self.assertEquals( ++ self.assertEqual( + 'A ', strip_js_comments.StripJSComments('A // foo')) +- self.assertEquals( ++ self.assertEqual( + 'A bar', strip_js_comments.StripJSComments('A // foo\nbar')) +- self.assertEquals( ++ self.assertEqual( + 'A b', strip_js_comments.StripJSComments('A /* foo */ b')) +- self.assertEquals( ++ self.assertEqual( + 'A b', strip_js_comments.StripJSComments('A /* foo\n */ b')) + + def test_tokenize_empty(self): + tokens = list(strip_js_comments._TokenizeJS('')) +- self.assertEquals([], tokens) ++ self.assertEqual([], tokens) + + def test_tokenize_nl(self): + tokens = list(strip_js_comments._TokenizeJS('\n')) +- self.assertEquals(['\n'], tokens) ++ self.assertEqual(['\n'], tokens) + + def test_tokenize_slashslash_comment(self): + tokens = list(strip_js_comments._TokenizeJS('A // foo')) +- self.assertEquals(['A ', '//', ' foo'], tokens) ++ self.assertEqual(['A ', '//', ' foo'], tokens) + + def test_tokenize_slashslash_comment_then_newline(self): + tokens = list(strip_js_comments._TokenizeJS('A // foo\nbar')) +- self.assertEquals(['A ', '//', ' foo', '\n', 'bar'], tokens) ++ self.assertEqual(['A ', '//', ' foo', '\n', 'bar'], tokens) + + def test_tokenize_cstyle_comment_one_line(self): + tokens = list(strip_js_comments._TokenizeJS('A /* foo */')) +- self.assertEquals(['A ', '/*', ' foo ', '*/'], tokens) ++ self.assertEqual(['A ', '/*', ' foo ', '*/'], tokens) + + def 
test_tokenize_cstyle_comment_multi_line(self): + tokens = list(strip_js_comments._TokenizeJS('A /* foo\n*bar\n*/')) +- self.assertEquals(['A ', '/*', ' foo', '\n', '*bar', '\n', '*/'], tokens) ++ self.assertEqual(['A ', '/*', ' foo', '\n', '*bar', '\n', '*/'], tokens) + + + if __name__ == '__main__': +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/style_sheet_unittest.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/style_sheet_unittest.py +index 4ebc71d56..555111dde 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/style_sheet_unittest.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/py_vulcanize/style_sheet_unittest.py +@@ -27,14 +27,14 @@ class StyleSheetUnittest(unittest.TestCase): + loader = resource_loader.ResourceLoader(project) + + foo_x = loader.LoadStyleSheet('foo.x') +- self.assertEquals(1, len(foo_x.images)) ++ self.assertEqual(1, len(foo_x.images)) + + r0 = foo_x.images[0] +- self.assertEquals(os.path.normpath('/src/images/bar.jpeg'), ++ self.assertEqual(os.path.normpath('/src/images/bar.jpeg'), + r0.absolute_path) + + inlined = foo_x.contents_with_inlined_images +- self.assertEquals(""" ++ self.assertEqual(""" + .x .y { + background-image: url(data:image/jpeg;base64,%s); + } +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/__init__.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/__init__.py +index 9582ecc10..6139d510a 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/__init__.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/__init__.py +@@ -21,7 +21,7 @@ + + This package provides tools for main package setup. 
+ """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + + from _setup.setup import run # pylint: disable = W0611 +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/commands.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/commands.py +index a41b166e0..fcc9dd742 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/commands.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/commands.py +@@ -21,7 +21,7 @@ + + Command extenders. + """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + __test__ = False + +@@ -65,17 +65,17 @@ def add_option(command, long_name, help_text, short_name=None, default=None, + attr_name = _fancy_getopt.translate_longopt(long_name) + else: + attr_name = _fancy_getopt.translate_longopt(long_name[:-1]) +- if not _option_defaults.has_key(command): ++ if command not in _option_defaults: + _option_defaults[command] = [] + if inherit is not None: +- if isinstance(inherit, (str, unicode)): ++ if isinstance(inherit, str): + inherit = [inherit] + for i_inherit in inherit: + add_option( + i_inherit, long_name, help_text, short_name, default + ) + default = None +- if not _option_inherits.has_key(command): ++ if command not in _option_inherits: + _option_inherits[command] = [] + for i_inherit in inherit: + for i_command, opt_name in _option_inherits[command]: +@@ -88,9 +88,9 @@ def add_option(command, long_name, help_text, short_name=None, default=None, + + def add_finalizer(command, key, func): + """ Add finalizer """ +- if not _option_finalizers.has_key(command): ++ if command not in _option_finalizers: + _option_finalizers[command] = {} +- if not _option_finalizers[command].has_key(key): ++ if key not in _option_finalizers[command]: + 
_option_finalizers[command][key] = func + + +@@ -108,18 +108,18 @@ class Install(_install.install): + """ Prepare for new options """ + _install.install.initialize_options(self) + self.single_version_externally_managed = None +- if _option_defaults.has_key('install'): ++ if 'install' in _option_defaults: + for opt_name, default in _option_defaults['install']: + setattr(self, opt_name, default) + + def finalize_options(self): + """ Finalize options """ + _install.install.finalize_options(self) +- if _option_inherits.has_key('install'): ++ if 'install' in _option_inherits: + for parent, opt_name in _option_inherits['install']: + self.set_undefined_options(parent, (opt_name, opt_name)) +- if _option_finalizers.has_key('install'): +- for func in _option_finalizers['install'].values(): ++ if 'install' in _option_finalizers: ++ for func in list(_option_finalizers['install'].values()): + func(self) + + +@@ -131,18 +131,18 @@ class InstallData(_install_data.install_data): + def initialize_options(self): + """ Prepare for new options """ + _install_data.install_data.initialize_options(self) +- if _option_defaults.has_key('install_data'): ++ if 'install_data' in _option_defaults: + for opt_name, default in _option_defaults['install_data']: + setattr(self, opt_name, default) + + def finalize_options(self): + """ Finalize options """ + _install_data.install_data.finalize_options(self) +- if _option_inherits.has_key('install_data'): ++ if 'install_data' in _option_inherits: + for parent, opt_name in _option_inherits['install_data']: + self.set_undefined_options(parent, (opt_name, opt_name)) +- if _option_finalizers.has_key('install_data'): +- for func in _option_finalizers['install_data'].values(): ++ if 'install_data' in _option_finalizers: ++ for func in list(_option_finalizers['install_data'].values()): + func(self) + + +@@ -154,18 +154,18 @@ class InstallLib(_install_lib.install_lib): + def initialize_options(self): + """ Prepare for new options """ + 
_install_lib.install_lib.initialize_options(self) +- if _option_defaults.has_key('install_lib'): ++ if 'install_lib' in _option_defaults: + for opt_name, default in _option_defaults['install_lib']: + setattr(self, opt_name, default) + + def finalize_options(self): + """ Finalize options """ + _install_lib.install_lib.finalize_options(self) +- if _option_inherits.has_key('install_lib'): ++ if 'install_lib' in _option_inherits: + for parent, opt_name in _option_inherits['install_lib']: + self.set_undefined_options(parent, (opt_name, opt_name)) +- if _option_finalizers.has_key('install_lib'): +- for func in _option_finalizers['install_lib'].values(): ++ if 'install_lib' in _option_finalizers: ++ for func in list(_option_finalizers['install_lib'].values()): + func(self) + + +@@ -182,18 +182,18 @@ class BuildExt(_build_ext.build_ext): + def initialize_options(self): + """ Prepare for new options """ + _build_ext.build_ext.initialize_options(self) +- if _option_defaults.has_key('build_ext'): ++ if 'build_ext' in _option_defaults: + for opt_name, default in _option_defaults['build_ext']: + setattr(self, opt_name, default) + + def finalize_options(self): + """ Finalize options """ + _build_ext.build_ext.finalize_options(self) +- if _option_inherits.has_key('build_ext'): ++ if 'build_ext' in _option_inherits: + for parent, opt_name in _option_inherits['build_ext']: + self.set_undefined_options(parent, (opt_name, opt_name)) +- if _option_finalizers.has_key('build_ext'): +- for func in _option_finalizers['build_ext'].values(): ++ if 'build_ext' in _option_finalizers: ++ for func in list(_option_finalizers['build_ext'].values()): + func(self) + + def build_extension(self, ext): +@@ -252,16 +252,16 @@ class Build(_build.build): + def initialize_options(self): + """ Prepare for new options """ + _build.build.initialize_options(self) +- if _option_defaults.has_key('build'): ++ if 'build' in _option_defaults: + for opt_name, default in _option_defaults['build']: + setattr(self, 
opt_name, default) + + def finalize_options(self): + """ Finalize options """ + _build.build.finalize_options(self) +- if _option_inherits.has_key('build'): ++ if 'build' in _option_inherits: + for parent, opt_name in _option_inherits['build']: + self.set_undefined_options(parent, (opt_name, opt_name)) +- if _option_finalizers.has_key('build'): +- for func in _option_finalizers['build'].values(): ++ if 'build' in _option_finalizers: ++ for func in list(_option_finalizers['build'].values()): + func(self) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/data.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/data.py +index 9cf04e104..d42217357 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/data.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/data.py +@@ -21,7 +21,7 @@ + + This module provides tools to simplify data distribution. 
+ """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + + from distutils import filelist as _filelist +@@ -121,7 +121,7 @@ class Data(object): + if len(name) > 1: + target = telems + name[:-1] + tmap.setdefault(_posixpath.join(*target), []).append(fname) +- return tmap.items() ++ return list(tmap.items()) + + + class Documentation(Data): +@@ -155,7 +155,7 @@ class Manpages(Documentation): + mpmap.setdefault(ext, []).append(manpage) + return [cls(manpages, prefix=_posixpath.join( + 'share', 'man', 'man%s' % section, +- )) for section, manpages in mpmap.items()] ++ )) for section, manpages in list(mpmap.items())] + dispatch = classmethod(dispatch) + + def flatten(self, installer): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/dist.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/dist.py +index 21a654151..cce21e57a 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/dist.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/dist.py +@@ -21,7 +21,7 @@ + + dist utilities. + """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + + import sys as _sys +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/ext.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/ext.py +index bcc0209a5..4fd4a065d 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/ext.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/ext.py +@@ -21,7 +21,7 @@ + + C extension tools. 
+ """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + __test__ = False + +@@ -60,7 +60,7 @@ class Extension(_core.Extension): + + def __init__(self, *args, **kwargs): + """ Initialization """ +- if kwargs.has_key('depends'): ++ if 'depends' in kwargs: + self.depends = kwargs['depends'] or [] + else: + self.depends = [] +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/setup.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/setup.py +index fd86f62f9..d1fc70fbb 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/setup.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/setup.py +@@ -21,10 +21,10 @@ + + This module provides a wrapper around the distutils core setup. + """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + +-import ConfigParser as _config_parser ++import configparser as _config_parser + from distutils import core as _core + import os as _os + import posixpath as _posixpath +@@ -52,16 +52,16 @@ def check_python_version(impl, version_min, version_max): + else: + raise AssertionError("impl not in ('python', 'pypy', 'jython')") + +- pyversion = map(int, version_info[:3]) ++ pyversion = list(map(int, version_info[:3])) + if version_min: + min_required = \ +- map(int, '.'.join((version_min, '0.0.0')).split('.')[:3]) ++ list(map(int, '.'.join((version_min, '0.0.0')).split('.')[:3])) + if pyversion < min_required: + raise EnvironmentError("Need at least %s %s (vs. 
%s)" % ( + impl, version_min, '.'.join(map(str, pyversion)) + )) + if version_max: +- max_required = map(int, version_max.split('.')) ++ max_required = list(map(int, version_max.split('.'))) + max_required[-1] += 1 + if pyversion >= max_required: + raise EnvironmentError("Need at max %s %s (vs. %s)" % ( +@@ -188,7 +188,7 @@ def find_packages(manifest): + packages[ + _os.path.normpath(dirpath).replace(sep, '.') + ] = None +- packages = packages.keys() ++ packages = list(packages.keys()) + packages.sort() + return packages + +@@ -311,9 +311,9 @@ def make_manifest(manifest, config, docs, kwargs): + cmd.ensure_finalized() + #from pprint import pprint; pprint(("install_data", cmd.get_inputs())) + try: +- strings = basestring ++ strings = str + except NameError: +- strings = (str, unicode) ++ strings = (str, str) + + for item in cmd.get_inputs(): + if isinstance(item, strings): +@@ -327,7 +327,7 @@ def make_manifest(manifest, config, docs, kwargs): + for filename in _shell.files(item): + result.append(filename) + +- result = dict([(item, None) for item in result]).keys() ++ result = list(dict([(item, None) for item in result]).keys()) + result.sort() + return result + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/shell.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/shell.py +index 4eafb9c86..0fb659cf5 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/shell.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/shell.py +@@ -21,9 +21,9 @@ + + Shell utilities. 
+ """ +-from __future__ import generators + +-__author__ = u"Andr\xe9 Malo" ++ ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + + import errno as _errno +@@ -49,7 +49,7 @@ class SignalError(ExitError): + ExitError.__init__(self, code) + import signal as _signal + self.signal = signal +- for key, val in vars(_signal).iteritems(): ++ for key, val in vars(_signal).items(): + if key.startswith('SIG') and not key.startswith('SIG_'): + if val == signal: + self.signalstr = key[3:] +@@ -78,7 +78,7 @@ def rm(dest): + """ Remove a file """ + try: + _os.unlink(native(dest)) +- except OSError, e: ++ except OSError as e: + if _errno.ENOENT != e.errno: + raise + +@@ -87,7 +87,7 @@ def rm_rf(dest): + dest = native(dest) + if _os.path.exists(dest): + for path in files(dest, '*'): +- _os.chmod(native(path), 0644) ++ _os.chmod(native(path), 0o644) + _shutil.rmtree(dest) + + +@@ -136,15 +136,15 @@ except AttributeError: + j = _tempfile._counter.get_next() # pylint: disable = E1101, W0212 + fname = _os.path.join(dir, prefix + str(j) + suffix) + try: +- fd = _os.open(fname, flags, 0600) +- except OSError, e: ++ fd = _os.open(fname, flags, 0o600) ++ except OSError as e: + if e.errno == _errno.EEXIST: + count -= 1 + continue + raise + _set_cloexec(fd) + return fd, _os.path.abspath(fname) +- raise IOError, (_errno.EEXIST, "No usable temporary file name found") ++ raise IOError(_errno.EEXIST, "No usable temporary file name found") + + + def _pipespawn(argv, env): +@@ -234,7 +234,7 @@ sys.exit(3) + res = proc.wait() + if res != 0: + if res == 2: +- signal, code = map(int, result.splitlines()[-1].split()) ++ signal, code = list(map(int, result.splitlines()[-1].split())) + raise SignalError(code, signal) + elif res == 3: + code = int(result.splitlines()[-1].strip()) +@@ -346,7 +346,7 @@ def spawn(*argv, **kwargs): + + echo = kwargs.get('echo') + if echo: +- print ' '.join(argv) ++ print(' '.join(argv)) + filepipe = kwargs.get('filepipe') + if filepipe: + return 
_filepipespawn( +@@ -376,7 +376,7 @@ except AttributeError: + + try: + names = listdir(top) +- except error, err: ++ except error as err: + if onerror is not None: + onerror(err) + return +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/term/__init__.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/term/__init__.py +index 4e50ec374..545945451 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/term/__init__.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/term/__init__.py +@@ -21,7 +21,7 @@ + + Terminal tools, not distributed. + """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + + # pylint: disable = W0611 +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/term/_term.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/term/_term.py +index 72b727c1a..6beb3b796 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/term/_term.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/term/_term.py +@@ -19,7 +19,7 @@ + Terminal writer + ================= + """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + + import sys as _sys +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/util.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/util.py +index 5c05fac6e..813d4b1e8 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/util.py ++++ 
b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py2/util.py +@@ -21,7 +21,7 @@ + + Setup utilities. + """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + + try: +@@ -29,16 +29,16 @@ try: + except ImportError: + class log(object): + def info(self, value): +- print value ++ print(value) + def debug(self, value): + pass + log = log() + + from distutils import util as _util + try: +- from ConfigParser import SafeConfigParser ++ from configparser import SafeConfigParser + except ImportError: +- import ConfigParser as _config_parser ++ import configparser as _config_parser + class SafeConfigParser(_config_parser.ConfigParser): + """ Safe config parser """ + def _interpolate(self, section, option, rawval, vars): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py3/shell.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py3/shell.py +index 91f2ebc5b..7641691e2 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py3/shell.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/_setup/py3/shell.py +@@ -48,7 +48,7 @@ class SignalError(ExitError): + ExitError.__init__(self, code) + import signal as _signal + self.signal = signal +- for key, val in vars(_signal).items(): ++ for key, val in list(vars(_signal).items()): + if key.startswith('SIG') and not key.startswith('SIG_'): + if val == signal: + self.signalstr = key[3:] +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/bench/cssmin.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/bench/cssmin.py +index cbfbf8d49..3eae15818 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/bench/cssmin.py ++++ 
b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/bench/cssmin.py +@@ -24,10 +24,7 @@ + + """ + +-try: +- from StringIO import StringIO # The pure-Python StringIO supports unicode. +-except ImportError: +- from io import StringIO ++from io import StringIO + import re + + +@@ -129,7 +126,7 @@ def normalize_rgb_colors_to_hex(css): + regex = re.compile(r"rgb\s*\(\s*([0-9,\s]+)\s*\)") + match = regex.search(css) + while match: +- colors = map(lambda s: s.strip(), match.group(1).split(",")) ++ colors = [s.strip() for s in match.group(1).split(",")] + hexcolor = '#%.2x%.2x%.2x' % tuple(map(int, colors)) + css = css.replace(match.group(), hexcolor) + match = regex.search(css) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/bench/main.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/bench/main.py +index 078150629..dcf93794d 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/bench/main.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/bench/main.py +@@ -113,7 +113,7 @@ def bench(filenames, count): + + ports = [item for item in dir(cssmins) if item.startswith('p_')] + ports.sort() +- space = max(map(len, ports)) - 4 ++ space = max(list(map(len, ports))) - 4 + ports = [(item[5:], getattr(cssmins, item).cssmin) for item in ports] + flush = _sys.stdout.flush + +@@ -159,7 +159,7 @@ def bench(filenames, count): + + xcount = count + while True: +- counted = [None for _ in xrange(xcount)] ++ counted = [None for _ in range(xcount)] + start = _time.time() + for _ in counted: + cssmin(style) +@@ -199,20 +199,20 @@ def main(argv=None): + opts, args = _getopt.getopt(argv, "hc:p:", ["help"]) + except getopt.GetoptError: + e = _sys.exc_info()[0](_sys.exc_info()[1]) +- print >> _sys.stderr, "%s\nTry %s -mbench.main --help" % ( ++ print("%s\nTry %s -mbench.main --help" % ( + e, + 
_os.path.basename(_sys.executable), +- ) ++ ), file=_sys.stderr) + _sys.exit(2) + + count, pickle = 10, None + for key, value in opts: + if key in ("-h", "--help"): +- print >> _sys.stderr, ( ++ print(( + "%s -mbench.main [-c count] [-p file] cssfile ..." % ( + _os.path.basename(_sys.executable), + ) +- ) ++ ), file=_sys.stderr) + _sys.exit(0) + elif key == '-c': + count = int(value) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/bench/write.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/bench/write.py +index 939e11d59..e8b1dfc50 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/bench/write.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rcssmin/bench/write.py +@@ -47,7 +47,7 @@ import sys as _sys + + + try: +- unicode ++ str + except NameError: + def uni(v): + if hasattr(v, 'decode'): +@@ -55,7 +55,7 @@ except NameError: + return str(v) + else: + def uni(v): +- if isinstance(v, unicode): ++ if isinstance(v, str): + return v.encode('utf-8') + return str(v) + +@@ -140,10 +140,10 @@ def write_table(filename, results): + + # calculate column widths (global for all tables) + for idx, row in enumerate(rows): +- widths[idx] = max(widths[idx], max(map(len, row))) ++ widths[idx] = max(widths[idx], max(list(map(len, row)))) + + # ... and transpose it back. 
+- tables.append(zip(*rows)) ++ tables.append(list(zip(*rows))) + pythons.append((version, tables)) + + if last_version.startswith('2.'): +@@ -302,20 +302,20 @@ def main(argv=None): + opts, args = _getopt.getopt(argv, "hp:t:", ["help"]) + except getopt.GetoptError: + e = _sys.exc_info()[0](_sys.exc_info()[1]) +- print >> _sys.stderr, "%s\nTry %s -mbench.write --help" % ( ++ print("%s\nTry %s -mbench.write --help" % ( + e, + _os.path.basename(_sys.executable), +- ) ++ ), file=_sys.stderr) + _sys.exit(2) + + plain, table = None, None + for key, value in opts: + if key in ("-h", "--help"): +- print >> _sys.stderr, ( ++ print(( + "%s -mbench.write [-p plain] [-t table] 1: + target = telems + name[:-1] + tmap.setdefault(_posixpath.join(*target), []).append(fname) +- return tmap.items() ++ return list(tmap.items()) + + + class Documentation(Data): +@@ -155,7 +155,7 @@ class Manpages(Documentation): + mpmap.setdefault(ext, []).append(manpage) + return [cls(manpages, prefix=_posixpath.join( + 'share', 'man', 'man%s' % section, +- )) for section, manpages in mpmap.items()] ++ )) for section, manpages in list(mpmap.items())] + dispatch = classmethod(dispatch) + + def flatten(self, installer): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/dist.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/dist.py +index 21a654151..cce21e57a 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/dist.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/dist.py +@@ -21,7 +21,7 @@ + + dist utilities. 
+ """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + + import sys as _sys +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/ext.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/ext.py +index bcc0209a5..4fd4a065d 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/ext.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/ext.py +@@ -21,7 +21,7 @@ + + C extension tools. + """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + __test__ = False + +@@ -60,7 +60,7 @@ class Extension(_core.Extension): + + def __init__(self, *args, **kwargs): + """ Initialization """ +- if kwargs.has_key('depends'): ++ if 'depends' in kwargs: + self.depends = kwargs['depends'] or [] + else: + self.depends = [] +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/setup.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/setup.py +index 8b15d497a..ef6b28b75 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/setup.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/setup.py +@@ -21,10 +21,10 @@ + + This module provides a wrapper around the distutils core setup. 
+ """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + +-import ConfigParser as _config_parser ++import configparser as _config_parser + from distutils import core as _core + import os as _os + import posixpath as _posixpath +@@ -52,16 +52,16 @@ def check_python_version(impl, version_min, version_max): + else: + raise AssertionError("impl not in ('python', 'pypy', 'jython')") + +- pyversion = map(int, version_info[:3]) ++ pyversion = list(map(int, version_info[:3])) + if version_min: + min_required = \ +- map(int, '.'.join((version_min, '0.0.0')).split('.')[:3]) ++ list(map(int, '.'.join((version_min, '0.0.0')).split('.')[:3])) + if pyversion < min_required: + raise EnvironmentError("Need at least %s %s (vs. %s)" % ( + impl, version_min, '.'.join(map(str, pyversion)) + )) + if version_max: +- max_required = map(int, version_max.split('.')) ++ max_required = list(map(int, version_max.split('.'))) + max_required[-1] += 1 + if pyversion >= max_required: + raise EnvironmentError("Need at max %s %s (vs. 
%s)" % ( +@@ -188,7 +188,7 @@ def find_packages(manifest): + packages[ + _os.path.normpath(dirpath).replace(sep, '.') + ] = None +- packages = packages.keys() ++ packages = list(packages.keys()) + packages.sort() + return packages + +@@ -311,9 +311,9 @@ def make_manifest(manifest, config, docs, kwargs): + cmd.ensure_finalized() + #from pprint import pprint; pprint(("install_data", cmd.get_inputs())) + try: +- strings = basestring ++ strings = str + except NameError: +- strings = (str, unicode) ++ strings = (str, str) + + for item in cmd.get_inputs(): + if isinstance(item, strings): +@@ -327,7 +327,7 @@ def make_manifest(manifest, config, docs, kwargs): + for filename in _shell.files(item): + result.append(filename) + +- result = dict([(item, None) for item in result]).keys() ++ result = list(dict([(item, None) for item in result]).keys()) + result.sort() + return result + +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/shell.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/shell.py +index 4eafb9c86..0fb659cf5 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/shell.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/shell.py +@@ -21,9 +21,9 @@ + + Shell utilities. 
+ """ +-from __future__ import generators + +-__author__ = u"Andr\xe9 Malo" ++ ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + + import errno as _errno +@@ -49,7 +49,7 @@ class SignalError(ExitError): + ExitError.__init__(self, code) + import signal as _signal + self.signal = signal +- for key, val in vars(_signal).iteritems(): ++ for key, val in vars(_signal).items(): + if key.startswith('SIG') and not key.startswith('SIG_'): + if val == signal: + self.signalstr = key[3:] +@@ -78,7 +78,7 @@ def rm(dest): + """ Remove a file """ + try: + _os.unlink(native(dest)) +- except OSError, e: ++ except OSError as e: + if _errno.ENOENT != e.errno: + raise + +@@ -87,7 +87,7 @@ def rm_rf(dest): + dest = native(dest) + if _os.path.exists(dest): + for path in files(dest, '*'): +- _os.chmod(native(path), 0644) ++ _os.chmod(native(path), 0o644) + _shutil.rmtree(dest) + + +@@ -136,15 +136,15 @@ except AttributeError: + j = _tempfile._counter.get_next() # pylint: disable = E1101, W0212 + fname = _os.path.join(dir, prefix + str(j) + suffix) + try: +- fd = _os.open(fname, flags, 0600) +- except OSError, e: ++ fd = _os.open(fname, flags, 0o600) ++ except OSError as e: + if e.errno == _errno.EEXIST: + count -= 1 + continue + raise + _set_cloexec(fd) + return fd, _os.path.abspath(fname) +- raise IOError, (_errno.EEXIST, "No usable temporary file name found") ++ raise IOError(_errno.EEXIST, "No usable temporary file name found") + + + def _pipespawn(argv, env): +@@ -234,7 +234,7 @@ sys.exit(3) + res = proc.wait() + if res != 0: + if res == 2: +- signal, code = map(int, result.splitlines()[-1].split()) ++ signal, code = list(map(int, result.splitlines()[-1].split())) + raise SignalError(code, signal) + elif res == 3: + code = int(result.splitlines()[-1].strip()) +@@ -346,7 +346,7 @@ def spawn(*argv, **kwargs): + + echo = kwargs.get('echo') + if echo: +- print ' '.join(argv) ++ print(' '.join(argv)) + filepipe = kwargs.get('filepipe') + if filepipe: + return 
_filepipespawn( +@@ -376,7 +376,7 @@ except AttributeError: + + try: + names = listdir(top) +- except error, err: ++ except error as err: + if onerror is not None: + onerror(err) + return +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/util.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/util.py +index 5c05fac6e..813d4b1e8 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/util.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py2/util.py +@@ -21,7 +21,7 @@ + + Setup utilities. + """ +-__author__ = u"Andr\xe9 Malo" ++__author__ = "Andr\xe9 Malo" + __docformat__ = "restructuredtext en" + + try: +@@ -29,16 +29,16 @@ try: + except ImportError: + class log(object): + def info(self, value): +- print value ++ print(value) + def debug(self, value): + pass + log = log() + + from distutils import util as _util + try: +- from ConfigParser import SafeConfigParser ++ from configparser import SafeConfigParser + except ImportError: +- import ConfigParser as _config_parser ++ import configparser as _config_parser + class SafeConfigParser(_config_parser.ConfigParser): + """ Safe config parser """ + def _interpolate(self, section, option, rawval, vars): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py3/shell.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py3/shell.py +index 91f2ebc5b..7641691e2 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py3/shell.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/_setup/py3/shell.py +@@ -48,7 +48,7 @@ class SignalError(ExitError): + ExitError.__init__(self, code) + import signal as _signal + self.signal = signal +- for key, val in 
vars(_signal).items(): ++ for key, val in list(vars(_signal).items()): + if key.startswith('SIG') and not key.startswith('SIG_'): + if val == signal: + self.signalstr = key[3:] +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/jsmin.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/jsmin.py +index b1f7dd7e6..6ec9b119a 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/jsmin.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/jsmin.py +@@ -31,13 +31,7 @@ + # */ + + # imports adjusted for speed (cStringIO) and python 3 (io) -- nd +-try: +- from cStringIO import StringIO +-except ImportError: +- try: +- from StringIO import StringIO +- except ImportError: +- from io import StringIO ++from io import StringIO + + + def jsmin(js): +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/jsmin_2_0_9.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/jsmin_2_0_9.py +index 23bed6082..7415a2af3 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/jsmin_2_0_9.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/jsmin_2_0_9.py +@@ -29,9 +29,9 @@ is_3 = sys.version_info >= (3, 0) + if is_3: + import io + else: +- import StringIO ++ import io + try: +- import cStringIO ++ import io + except ImportError: + cStringIO = None + +@@ -45,12 +45,12 @@ def jsmin(js): + returns a minified version of the javascript string + """ + if not is_3: +- if cStringIO and not isinstance(js, unicode): ++ if cStringIO and not isinstance(js, str): + # strings can use cStringIO for a 3x performance +- # improvement, but unicode (in python2) cannot +- klass = cStringIO.StringIO ++ # improvement, but unicode (in python3) cannot ++ klass = 
io.StringIO + else: +- klass = StringIO.StringIO ++ klass = io.StringIO + else: + klass = io.StringIO + ins = klass(js) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/main.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/main.py +index c43810594..0632ee6b9 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/main.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/main.py +@@ -111,7 +111,7 @@ def bench(filenames, count): + + ports = [item for item in dir(jsmins) if item.startswith('p_')] + ports.sort() +- space = max(map(len, ports)) - 4 ++ space = max(list(map(len, ports))) - 4 + ports = [(item[5:], getattr(jsmins, item).jsmin) for item in ports] + flush = _sys.stdout.flush + +@@ -157,7 +157,7 @@ def bench(filenames, count): + + xcount = count + while True: +- counted = [None for _ in xrange(xcount)] ++ counted = [None for _ in range(xcount)] + start = _time.time() + for _ in counted: + jsmin(script) +@@ -197,20 +197,20 @@ def main(argv=None): + opts, args = _getopt.getopt(argv, "hc:p:", ["help"]) + except getopt.GetoptError: + e = _sys.exc_info()[0](_sys.exc_info()[1]) +- print >> _sys.stderr, "%s\nTry %s -mbench.main --help" % ( ++ print("%s\nTry %s -mbench.main --help" % ( + e, + _os.path.basename(_sys.executable), +- ) ++ ), file=_sys.stderr) + _sys.exit(2) + + count, pickle = 10, None + for key, value in opts: + if key in ("-h", "--help"): +- print >> _sys.stderr, ( ++ print(( + "%s -mbench.main [-c count] [-p file] cssfile ..." 
% ( + _os.path.basename(_sys.executable), + ) +- ) ++ ), file=_sys.stderr) + _sys.exit(0) + elif key == '-c': + count = int(value) +diff --git a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/write.py b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/write.py +index e91c3d073..df436ea96 100644 +--- a/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/write.py ++++ b/src/3rdparty/chromium/third_party/catapult/common/py_vulcanize/third_party/rjsmin/bench/write.py +@@ -47,7 +47,7 @@ import sys as _sys + + + try: +- unicode ++ str + except NameError: + def uni(v): + if hasattr(v, 'decode'): +@@ -55,7 +55,7 @@ except NameError: + return str(v) + else: + def uni(v): +- if isinstance(v, unicode): ++ if isinstance(v, str): + return v.encode('utf-8') + return str(v) + +@@ -141,10 +141,10 @@ def write_table(filename, results): + + # calculate column widths (global for all tables) + for idx, row in enumerate(rows): +- widths[idx] = max(widths[idx], max(map(len, row))) ++ widths[idx] = max(widths[idx], max(list(map(len, row)))) + + # ... and transpose it back. 
+- tables.append(zip(*rows)) ++ tables.append(list(zip(*rows))) + pythons.append((version, tables)) + + if last_version.startswith('2.'): +@@ -303,20 +303,20 @@ def main(argv=None): + opts, args = _getopt.getopt(argv, "hp:t:", ["help"]) + except getopt.GetoptError: + e = _sys.exc_info()[0](_sys.exc_info()[1]) +- print >> _sys.stderr, "%s\nTry %s -mbench.write --help" % ( ++ print("%s\nTry %s -mbench.write --help" % ( + e, + _os.path.basename(_sys.executable), +- ) ++ ), file=_sys.stderr) + _sys.exit(2) + + plain, table = None, None + for key, value in opts: + if key in ("-h", "--help"): +- print >> _sys.stderr, ( ++ print(( + "%s -mbench.write [-p plain] [-t table] , " ++ ) ++ assert isinstance(soup.style.string, Stylesheet) ++ assert isinstance(soup.script.string, Script) ++ ++ soup = self.soup( ++ "" ++ ) ++ assert isinstance(soup.style.string, Stylesheet) ++ # The contents of the style tag resemble an HTML comment, but ++ # it's not treated as a comment. ++ assert soup.style.string == "" ++ assert isinstance(soup.style.string, Stylesheet) ++ ++ def test_pickle_and_unpickle_identity(self): ++ # Pickling a tree, then unpickling it, yields a tree identical ++ # to the original. ++ tree = self.soup("foo") ++ dumped = pickle.dumps(tree, 2) ++ loaded = pickle.loads(dumped) ++ assert loaded.__class__ == BeautifulSoup ++ assert loaded.decode() == tree.decode() ++ ++ def assertDoctypeHandled(self, doctype_fragment): ++ """Assert that a given doctype string is handled correctly.""" ++ doctype_str, soup = self._document_with_doctype(doctype_fragment) ++ ++ # Make sure a Doctype object was created. ++ doctype = soup.contents[0] ++ assert doctype.__class__ == Doctype ++ assert doctype == doctype_fragment ++ assert soup.encode("utf8")[:len(doctype_str)] == doctype_str ++ ++ # Make sure that the doctype was correctly associated with the ++ # parse tree and that the rest of the document parsed. 
++ assert soup.p.contents[0] == 'foo' ++ ++ def _document_with_doctype(self, doctype_fragment, doctype_string="DOCTYPE"): ++ """Generate and parse a document with the given doctype.""" ++ doctype = '' % (doctype_string, doctype_fragment) ++ markup = doctype + '\n

foo

' ++ soup = self.soup(markup) ++ return doctype.encode("utf8"), soup ++ ++ def test_normal_doctypes(self): ++ """Make sure normal, everyday HTML doctypes are handled correctly.""" ++ self.assertDoctypeHandled("html") ++ self.assertDoctypeHandled( ++ 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"') ++ ++ def test_empty_doctype(self): ++ soup = self.soup("") ++ doctype = soup.contents[0] ++ assert "" == doctype.strip() ++ ++ def test_mixed_case_doctype(self): ++ # A lowercase or mixed-case doctype becomes a Doctype. ++ for doctype_fragment in ("doctype", "DocType"): ++ doctype_str, soup = self._document_with_doctype( ++ "html", doctype_fragment ++ ) ++ ++ # Make sure a Doctype object was created and that the DOCTYPE ++ # is uppercase. ++ doctype = soup.contents[0] ++ assert doctype.__class__ == Doctype ++ assert doctype == "html" ++ assert soup.encode("utf8")[:len(doctype_str)] == b"" ++ ++ # Make sure that the doctype was correctly associated with the ++ # parse tree and that the rest of the document parsed. ++ assert soup.p.contents[0] == 'foo' ++ ++ def test_public_doctype_with_url(self): ++ doctype = 'html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"' ++ self.assertDoctypeHandled(doctype) ++ ++ def test_system_doctype(self): ++ self.assertDoctypeHandled('foo SYSTEM "http://www.example.com/"') ++ ++ def test_namespaced_system_doctype(self): ++ # We can handle a namespaced doctype with a system ID. ++ self.assertDoctypeHandled('xsl:stylesheet SYSTEM "htmlent.dtd"') ++ ++ def test_namespaced_public_doctype(self): ++ # Test a namespaced doctype with a public id. ++ self.assertDoctypeHandled('xsl:stylesheet PUBLIC "htmlent.dtd"') ++ ++ def test_real_xhtml_document(self): ++ """A real XHTML document should come out more or less the same as it went in.""" ++ markup = b""" ++ ++ ++Hello. ++Goodbye. 
++""" ++ with warnings.catch_warnings(record=True) as w: ++ soup = self.soup(markup) ++ assert soup.encode("utf-8").replace(b"\n", b"") == markup.replace(b"\n", b"") ++ ++ # No warning was issued about parsing an XML document as HTML, ++ # because XHTML is both. ++ assert w == [] ++ ++ ++ def test_namespaced_html(self): ++ # When a namespaced XML document is parsed as HTML it should ++ # be treated as HTML with weird tag names. ++ markup = b"""content""" ++ with warnings.catch_warnings(record=True) as w: ++ soup = self.soup(markup) ++ ++ assert 2 == len(soup.find_all("ns1:foo")) ++ ++ # n.b. no "you're parsing XML as HTML" warning was given ++ # because there was no XML declaration. ++ assert [] == w ++ ++ def test_detect_xml_parsed_as_html(self): ++ # A warning is issued when parsing an XML document as HTML, ++ # but basic stuff should still work. ++ markup = b"""string""" ++ with warnings.catch_warnings(record=True) as w: ++ soup = self.soup(markup) ++ assert soup.tag.string == 'string' ++ [warning] = w ++ assert isinstance(warning.message, XMLParsedAsHTMLWarning) ++ assert str(warning.message) == XMLParsedAsHTMLWarning.MESSAGE ++ ++ # NOTE: the warning is not issued if the document appears to ++ # be XHTML (tested with test_real_xhtml_document in the ++ # superclass) or if there is no XML declaration (tested with ++ # test_namespaced_html in the superclass). ++ ++ def test_processing_instruction(self): ++ # We test both Unicode and bytestring to verify that ++ # process_markup correctly sets processing_instruction_class ++ # even when the markup is already Unicode and there is no ++ # need to process anything. ++ markup = """""" ++ soup = self.soup(markup) ++ assert markup == soup.decode() ++ ++ markup = b"""""" ++ soup = self.soup(markup) ++ assert markup == soup.encode("utf8") ++ ++ def test_deepcopy(self): ++ """Make sure you can copy the tree builder. 
++ ++ This is important because the builder is part of a ++ BeautifulSoup object, and we want to be able to copy that. ++ """ ++ copy.deepcopy(self.default_builder) ++ ++ def test_p_tag_is_never_empty_element(self): ++ """A

tag is never designated as an empty-element tag. ++ ++ Even if the markup shows it as an empty-element tag, it ++ shouldn't be presented that way. ++ """ ++ soup = self.soup("

") ++ assert not soup.p.is_empty_element ++ assert str(soup.p) == "

" ++ ++ def test_unclosed_tags_get_closed(self): ++ """A tag that's not closed by the end of the document should be closed. ++ ++ This applies to all tags except empty-element tags. ++ """ ++ self.assert_soup("

", "

") ++ self.assert_soup("", "") ++ ++ self.assert_soup("
", "
") ++ ++ def test_br_is_always_empty_element_tag(self): ++ """A
tag is designated as an empty-element tag. ++ ++ Some parsers treat

as one
tag, some parsers as ++ two tags, but it should always be an empty-element tag. ++ """ ++ soup = self.soup("

") ++ assert soup.br.is_empty_element ++ assert str(soup.br) == "
" ++ ++ def test_nested_formatting_elements(self): ++ self.assert_soup("") ++ ++ def test_double_head(self): ++ html = ''' ++ ++ ++Ordinary HEAD element test ++ ++ ++ ++Hello, world! ++ ++ ++''' ++ soup = self.soup(html) ++ assert "text/javascript" == soup.find('script')['type'] ++ ++ def test_comment(self): ++ # Comments are represented as Comment objects. ++ markup = "

foobaz

" ++ self.assert_soup(markup) ++ ++ soup = self.soup(markup) ++ comment = soup.find(string="foobar") ++ assert comment.__class__ == Comment ++ ++ # The comment is properly integrated into the tree. ++ foo = soup.find(string="foo") ++ assert comment == foo.next_element ++ baz = soup.find(string="baz") ++ assert comment == baz.previous_element ++ ++ def test_preserved_whitespace_in_pre_and_textarea(self): ++ """Whitespace must be preserved in
 and "
++        self.assert_soup(pre_markup)
++        self.assert_soup(textarea_markup)
++
++        soup = self.soup(pre_markup)
++        assert soup.pre.prettify() == pre_markup
++
++        soup = self.soup(textarea_markup)
++        assert soup.textarea.prettify() == textarea_markup
++
++        soup = self.soup("")
++        assert soup.textarea.prettify() == ""
++
++    def test_nested_inline_elements(self):
++        """Inline elements can be nested indefinitely."""
++        b_tag = "Inside a B tag"
++        self.assert_soup(b_tag)
++
++        nested_b_tag = "

A nested tag

" ++ self.assert_soup(nested_b_tag) ++ ++ double_nested_b_tag = "

A doubly nested tag

" ++ self.assert_soup(nested_b_tag) ++ ++ def test_nested_block_level_elements(self): ++ """Block elements can be nested.""" ++ soup = self.soup('

Foo

') ++ blockquote = soup.blockquote ++ assert blockquote.p.b.string == 'Foo' ++ assert blockquote.b.string == 'Foo' ++ ++ def test_correctly_nested_tables(self): ++ """One table can go inside another one.""" ++ markup = ('' ++ '' ++ "') ++ ++ self.assert_soup( ++ markup, ++ '
Here's another table:" ++ '' ++ '' ++ '
foo
Here\'s another table:' ++ '
foo
' ++ '
') ++ ++ self.assert_soup( ++ "" ++ "" ++ "
Foo
Bar
Baz
") ++ ++ def test_multivalued_attribute_with_whitespace(self): ++ # Whitespace separating the values of a multi-valued attribute ++ # should be ignored. ++ ++ markup = '
' ++ soup = self.soup(markup) ++ assert ['foo', 'bar'] == soup.div['class'] ++ ++ # If you search by the literal name of the class it's like the whitespace ++ # wasn't there. ++ assert soup.div == soup.find('div', class_="foo bar") ++ ++ def test_deeply_nested_multivalued_attribute(self): ++ # html5lib can set the attributes of the same tag many times ++ # as it rearranges the tree. This has caused problems with ++ # multivalued attributes. ++ markup = '
' ++ soup = self.soup(markup) ++ assert ["css"] == soup.div.div['class'] ++ ++ def test_multivalued_attribute_on_html(self): ++ # html5lib uses a different API to set the attributes ot the ++ # tag. This has caused problems with multivalued ++ # attributes. ++ markup = '' ++ soup = self.soup(markup) ++ assert ["a", "b"] == soup.html['class'] ++ ++ def test_angle_brackets_in_attribute_values_are_escaped(self): ++ self.assert_soup('', '') ++ ++ def test_strings_resembling_character_entity_references(self): ++ # "&T" and "&p" look like incomplete character entities, but they are ++ # not. ++ self.assert_soup( ++ "

• AT&T is in the s&p 500

", ++ "

\u2022 AT&T is in the s&p 500

" ++ ) ++ ++ def test_apos_entity(self): ++ self.assert_soup( ++ "

Bob's Bar

", ++ "

Bob's Bar

", ++ ) ++ ++ def test_entities_in_foreign_document_encoding(self): ++ # “ and ” are invalid numeric entities referencing ++ # Windows-1252 characters. - references a character common ++ # to Windows-1252 and Unicode, and ☃ references a ++ # character only found in Unicode. ++ # ++ # All of these entities should be converted to Unicode ++ # characters. ++ markup = "

“Hello” -☃

" ++ soup = self.soup(markup) ++ assert "“Hello” -☃" == soup.p.string ++ ++ def test_entities_in_attributes_converted_to_unicode(self): ++ expect = '

' ++ self.assert_soup('

', expect) ++ self.assert_soup('

', expect) ++ self.assert_soup('

', expect) ++ self.assert_soup('

', expect) ++ ++ def test_entities_in_text_converted_to_unicode(self): ++ expect = '

pi\N{LATIN SMALL LETTER N WITH TILDE}ata

' ++ self.assert_soup("

piñata

", expect) ++ self.assert_soup("

piñata

", expect) ++ self.assert_soup("

piñata

", expect) ++ self.assert_soup("

piñata

", expect) ++ ++ def test_quot_entity_converted_to_quotation_mark(self): ++ self.assert_soup("

I said "good day!"

", ++ '

I said "good day!"

') ++ ++ def test_out_of_range_entity(self): ++ expect = "\N{REPLACEMENT CHARACTER}" ++ self.assert_soup("�", expect) ++ self.assert_soup("�", expect) ++ self.assert_soup("�", expect) ++ ++ def test_multipart_strings(self): ++ "Mostly to prevent a recurrence of a bug in the html5lib treebuilder." ++ soup = self.soup("

\nfoo

") ++ assert "p" == soup.h2.string.next_element.name ++ assert "p" == soup.p.name ++ self.assertConnectedness(soup) ++ ++ def test_empty_element_tags(self): ++ """Verify consistent handling of empty-element tags, ++ no matter how they come in through the markup. ++ """ ++ self.assert_soup('


', "


") ++ self.assert_soup('


', "


") ++ ++ def test_head_tag_between_head_and_body(self): ++ "Prevent recurrence of a bug in the html5lib treebuilder." ++ content = """ ++ ++ foo ++ ++""" ++ soup = self.soup(content) ++ assert soup.html.body is not None ++ self.assertConnectedness(soup) ++ ++ def test_multiple_copies_of_a_tag(self): ++ "Prevent recurrence of a bug in the html5lib treebuilder." ++ content = """ ++ ++ ++ ++ ++ ++""" ++ soup = self.soup(content) ++ self.assertConnectedness(soup.article) ++ ++ def test_basic_namespaces(self): ++ """Parsers don't need to *understand* namespaces, but at the ++ very least they should not choke on namespaces or lose ++ data.""" ++ ++ markup = b'4' ++ soup = self.soup(markup) ++ assert markup == soup.encode() ++ html = soup.html ++ assert 'http://www.w3.org/1999/xhtml' == soup.html['xmlns'] ++ assert 'http://www.w3.org/1998/Math/MathML' == soup.html['xmlns:mathml'] ++ assert 'http://www.w3.org/2000/svg' == soup.html['xmlns:svg'] ++ ++ def test_multivalued_attribute_value_becomes_list(self): ++ markup = b'' ++ soup = self.soup(markup) ++ assert ['foo', 'bar'] == soup.a['class'] ++ ++ # ++ # Generally speaking, tests below this point are more tests of ++ # Beautiful Soup than tests of the tree builders. But parsers are ++ # weird, so we run these tests separately for every tree builder ++ # to detect any differences between them. ++ # ++ ++ def test_can_parse_unicode_document(self): ++ # A seemingly innocuous document... but it's in Unicode! And ++ # it contains characters that can't be represented in the ++ # encoding found in the declaration! The horror! ++ markup = 'Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!' ++ soup = self.soup(markup) ++ assert 'Sacr\xe9 bleu!' 
== soup.body.string ++ ++ def test_soupstrainer(self): ++ """Parsers should be able to work with SoupStrainers.""" ++ strainer = SoupStrainer("b") ++ soup = self.soup("A bold statement", ++ parse_only=strainer) ++ assert soup.decode() == "bold" ++ ++ def test_single_quote_attribute_values_become_double_quotes(self): ++ self.assert_soup("", ++ '') ++ ++ def test_attribute_values_with_nested_quotes_are_left_alone(self): ++ text = """a""" ++ self.assert_soup(text) ++ ++ def test_attribute_values_with_double_nested_quotes_get_quoted(self): ++ text = """a""" ++ soup = self.soup(text) ++ soup.foo['attr'] = 'Brawls happen at "Bob\'s Bar"' ++ self.assert_soup( ++ soup.foo.decode(), ++ """a""") ++ ++ def test_ampersand_in_attribute_value_gets_escaped(self): ++ self.assert_soup('', ++ '') ++ ++ self.assert_soup( ++ 'foo', ++ 'foo') ++ ++ def test_escaped_ampersand_in_attribute_value_is_left_alone(self): ++ self.assert_soup('') ++ ++ def test_entities_in_strings_converted_during_parsing(self): ++ # Both XML and HTML entities are converted to Unicode characters ++ # during parsing. ++ text = "

<<sacré bleu!>>

" ++ expected = "

<<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>>

" ++ self.assert_soup(text, expected) ++ ++ def test_smart_quotes_converted_on_the_way_in(self): ++ # Microsoft smart quotes are converted to Unicode characters during ++ # parsing. ++ quote = b"

\x91Foo\x92

" ++ soup = self.soup(quote) ++ assert soup.p.string == "\N{LEFT SINGLE QUOTATION MARK}Foo\N{RIGHT SINGLE QUOTATION MARK}" ++ ++ def test_non_breaking_spaces_converted_on_the_way_in(self): ++ soup = self.soup("  ") ++ assert soup.a.string == "\N{NO-BREAK SPACE}" * 2 ++ ++ def test_entities_converted_on_the_way_out(self): ++ text = "

<<sacré bleu!>>

" ++ expected = "

<<sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>>

".encode("utf-8") ++ soup = self.soup(text) ++ assert soup.p.encode("utf-8") == expected ++ ++ def test_real_iso_latin_document(self): ++ # Smoke test of interrelated functionality, using an ++ # easy-to-understand document. ++ ++ # Here it is in Unicode. Note that it claims to be in ISO-Latin-1. ++ unicode_html = '

Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!

' ++ ++ # That's because we're going to encode it into ISO-Latin-1, and use ++ # that to test. ++ iso_latin_html = unicode_html.encode("iso-8859-1") ++ ++ # Parse the ISO-Latin-1 HTML. ++ soup = self.soup(iso_latin_html) ++ # Encode it to UTF-8. ++ result = soup.encode("utf-8") ++ ++ # What do we expect the result to look like? Well, it would ++ # look like unicode_html, except that the META tag would say ++ # UTF-8 instead of ISO-Latin-1. ++ expected = unicode_html.replace("ISO-Latin-1", "utf-8") ++ ++ # And, of course, it would be in UTF-8, not Unicode. ++ expected = expected.encode("utf-8") ++ ++ # Ta-da! ++ assert result == expected ++ ++ def test_real_shift_jis_document(self): ++ # Smoke test to make sure the parser can handle a document in ++ # Shift-JIS encoding, without choking. ++ shift_jis_html = ( ++ b'
'
++            b'\x82\xb1\x82\xea\x82\xcdShift-JIS\x82\xc5\x83R\x81[\x83f'
++            b'\x83B\x83\x93\x83O\x82\xb3\x82\xea\x82\xbd\x93\xfa\x96{\x8c'
++            b'\xea\x82\xcc\x83t\x83@\x83C\x83\x8b\x82\xc5\x82\xb7\x81B'
++            b'
') ++ unicode_html = shift_jis_html.decode("shift-jis") ++ soup = self.soup(unicode_html) ++ ++ # Make sure the parse tree is correctly encoded to various ++ # encodings. ++ assert soup.encode("utf-8") == unicode_html.encode("utf-8") ++ assert soup.encode("euc_jp") == unicode_html.encode("euc_jp") ++ ++ def test_real_hebrew_document(self): ++ # A real-world test to make sure we can convert ISO-8859-9 (a ++ # Hebrew encoding) to UTF-8. ++ hebrew_document = b'Hebrew (ISO 8859-8) in Visual Directionality

Hebrew (ISO 8859-8) in Visual Directionality

\xed\xe5\xec\xf9' ++ soup = self.soup( ++ hebrew_document, from_encoding="iso8859-8") ++ # Some tree builders call it iso8859-8, others call it iso-8859-9. ++ # That's not a difference we really care about. ++ assert soup.original_encoding in ('iso8859-8', 'iso-8859-8') ++ assert soup.encode('utf-8') == ( ++ hebrew_document.decode("iso8859-8").encode("utf-8") ++ ) ++ ++ def test_meta_tag_reflects_current_encoding(self): ++ # Here's the tag saying that a document is ++ # encoded in Shift-JIS. ++ meta_tag = ('') ++ ++ # Here's a document incorporating that meta tag. ++ shift_jis_html = ( ++ '\n%s\n' ++ '' ++ 'Shift-JIS markup goes here.') % meta_tag ++ soup = self.soup(shift_jis_html) ++ ++ # Parse the document, and the charset is seemingly unaffected. ++ parsed_meta = soup.find('meta', {'http-equiv': 'Content-type'}) ++ content = parsed_meta['content'] ++ assert 'text/html; charset=x-sjis' == content ++ ++ # But that value is actually a ContentMetaAttributeValue object. ++ assert isinstance(content, ContentMetaAttributeValue) ++ ++ # And it will take on a value that reflects its current ++ # encoding. ++ assert 'text/html; charset=utf8' == content.encode("utf8") ++ ++ # For the rest of the story, see TestSubstitutions in ++ # test_tree.py. ++ ++ def test_html5_style_meta_tag_reflects_current_encoding(self): ++ # Here's the tag saying that a document is ++ # encoded in Shift-JIS. ++ meta_tag = ('') ++ ++ # Here's a document incorporating that meta tag. ++ shift_jis_html = ( ++ '\n%s\n' ++ '' ++ 'Shift-JIS markup goes here.') % meta_tag ++ soup = self.soup(shift_jis_html) ++ ++ # Parse the document, and the charset is seemingly unaffected. ++ parsed_meta = soup.find('meta', id="encoding") ++ charset = parsed_meta['charset'] ++ assert 'x-sjis' == charset ++ ++ # But that value is actually a CharsetMetaAttributeValue object. ++ assert isinstance(charset, CharsetMetaAttributeValue) ++ ++ # And it will take on a value that reflects its current ++ # encoding. 
++ assert 'utf8' == charset.encode("utf8") ++ ++ def test_python_specific_encodings_not_used_in_charset(self): ++ # You can encode an HTML document using a Python-specific ++ # encoding, but that encoding won't be mentioned _inside_ the ++ # resulting document. Instead, the document will appear to ++ # have no encoding. ++ for markup in [ ++ b'' ++ b'' ++ ]: ++ soup = self.soup(markup) ++ for encoding in PYTHON_SPECIFIC_ENCODINGS: ++ if encoding in ( ++ 'idna', 'mbcs', 'oem', 'undefined', ++ 'string_escape', 'string-escape' ++ ): ++ # For one reason or another, these will raise an ++ # exception if we actually try to use them, so don't ++ # bother. ++ continue ++ encoded = soup.encode(encoding) ++ assert b'meta charset=""' in encoded ++ assert encoding.encode("ascii") not in encoded ++ ++ def test_tag_with_no_attributes_can_have_attributes_added(self): ++ data = self.soup("text") ++ data.a['foo'] = 'bar' ++ assert 'text' == data.a.decode() ++ ++ def test_closing_tag_with_no_opening_tag(self): ++ # Without BeautifulSoup.open_tag_counter, the tag will ++ # cause _popToTag to be called over and over again as we look ++ # for a tag that wasn't there. The result is that 'text2' ++ # will show up outside the body of the document. ++ soup = self.soup("

text1

text2
") ++ assert "

text1

text2
" == soup.body.decode() ++ ++ def test_worst_case(self): ++ """Test the worst case (currently) for linking issues.""" ++ ++ soup = self.soup(BAD_DOCUMENT) ++ self.linkage_validator(soup) ++ ++ ++class XMLTreeBuilderSmokeTest(TreeBuilderSmokeTest): ++ ++ def test_pickle_and_unpickle_identity(self): ++ # Pickling a tree, then unpickling it, yields a tree identical ++ # to the original. ++ tree = self.soup("foo") ++ dumped = pickle.dumps(tree, 2) ++ loaded = pickle.loads(dumped) ++ assert loaded.__class__ == BeautifulSoup ++ assert loaded.decode() == tree.decode() ++ ++ def test_docstring_generated(self): ++ soup = self.soup("") ++ assert soup.encode() == b'\n' ++ ++ def test_xml_declaration(self): ++ markup = b"""\n""" ++ soup = self.soup(markup) ++ assert markup == soup.encode("utf8") ++ ++ def test_python_specific_encodings_not_used_in_xml_declaration(self): ++ # You can encode an XML document using a Python-specific ++ # encoding, but that encoding won't be mentioned _inside_ the ++ # resulting document. ++ markup = b"""\n""" ++ soup = self.soup(markup) ++ for encoding in PYTHON_SPECIFIC_ENCODINGS: ++ if encoding in ( ++ 'idna', 'mbcs', 'oem', 'undefined', ++ 'string_escape', 'string-escape' ++ ): ++ # For one reason or another, these will raise an ++ # exception if we actually try to use them, so don't ++ # bother. ++ continue ++ encoded = soup.encode(encoding) ++ assert b'' in encoded ++ assert encoding.encode("ascii") not in encoded ++ ++ def test_processing_instruction(self): ++ markup = b"""\n""" ++ soup = self.soup(markup) ++ assert markup == soup.encode("utf8") ++ ++ def test_real_xhtml_document(self): ++ """A real XHTML document should come out *exactly* the same as it went in.""" ++ markup = b""" ++ ++ ++Hello. ++Goodbye. 
++""" ++ soup = self.soup(markup) ++ assert soup.encode("utf-8") == markup ++ ++ def test_nested_namespaces(self): ++ doc = b""" ++ ++ ++ ++ ++ ++""" ++ soup = self.soup(doc) ++ assert doc == soup.encode() ++ ++ def test_formatter_processes_script_tag_for_xml_documents(self): ++ doc = """ ++ ++""" ++ soup = BeautifulSoup(doc, "lxml-xml") ++ # lxml would have stripped this while parsing, but we can add ++ # it later. ++ soup.script.string = 'console.log("< < hey > > ");' ++ encoded = soup.encode() ++ assert b"< < hey > >" in encoded ++ ++ def test_can_parse_unicode_document(self): ++ markup = 'Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!' ++ soup = self.soup(markup) ++ assert 'Sacr\xe9 bleu!' == soup.root.string ++ ++ def test_can_parse_unicode_document_begining_with_bom(self): ++ markup = '\N{BYTE ORDER MARK}Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!' ++ soup = self.soup(markup) ++ assert 'Sacr\xe9 bleu!' == soup.root.string ++ ++ def test_popping_namespaced_tag(self): ++ markup = 'b2012-07-02T20:33:42Zcd' ++ soup = self.soup(markup) ++ assert str(soup.rss) == markup ++ ++ def test_docstring_includes_correct_encoding(self): ++ soup = self.soup("") ++ assert soup.encode("latin1") == b'\n' ++ ++ def test_large_xml_document(self): ++ """A large XML document should come out the same as it went in.""" ++ markup = (b'\n' ++ + b'0' * (2**12) ++ + b'') ++ soup = self.soup(markup) ++ assert soup.encode("utf-8") == markup ++ ++ def test_tags_are_empty_element_if_and_only_if_they_are_empty(self): ++ self.assert_soup("

", "

") ++ self.assert_soup("

foo

") ++ ++ def test_namespaces_are_preserved(self): ++ markup = 'This tag is in the a namespaceThis tag is in the b namespace' ++ soup = self.soup(markup) ++ root = soup.root ++ assert "http://example.com/" == root['xmlns:a'] ++ assert "http://example.net/" == root['xmlns:b'] ++ ++ def test_closing_namespaced_tag(self): ++ markup = '

20010504

' ++ soup = self.soup(markup) ++ assert str(soup.p) == markup ++ ++ def test_namespaced_attributes(self): ++ markup = '' ++ soup = self.soup(markup) ++ assert str(soup.foo) == markup ++ ++ def test_namespaced_attributes_xml_namespace(self): ++ markup = 'bar' ++ soup = self.soup(markup) ++ assert str(soup.foo) == markup ++ ++ def test_find_by_prefixed_name(self): ++ doc = """ ++foo ++ bar ++ baz ++ ++""" ++ soup = self.soup(doc) ++ ++ # There are three tags. ++ assert 3 == len(soup.find_all('tag')) ++ ++ # But two of them are ns1:tag and one of them is ns2:tag. ++ assert 2 == len(soup.find_all('ns1:tag')) ++ assert 1 == len(soup.find_all('ns2:tag')) ++ ++ assert 1, len(soup.find_all('ns2:tag', key='value')) ++ assert 3, len(soup.find_all(['ns1:tag', 'ns2:tag'])) ++ ++ def test_copy_tag_preserves_namespace(self): ++ xml = """ ++""" ++ ++ soup = self.soup(xml) ++ tag = soup.document ++ duplicate = copy.copy(tag) ++ ++ # The two tags have the same namespace prefix. ++ assert tag.prefix == duplicate.prefix ++ ++ def test_worst_case(self): ++ """Test the worst case (currently) for linking issues.""" ++ ++ soup = self.soup(BAD_DOCUMENT) ++ self.linkage_validator(soup) ++ ++ ++class HTML5TreeBuilderSmokeTest(HTMLTreeBuilderSmokeTest): ++ """Smoke test for a tree builder that supports HTML5.""" ++ ++ def test_real_xhtml_document(self): ++ # Since XHTML is not HTML5, HTML5 parsers are not tested to handle ++ # XHTML documents in any particular way. 
++ pass ++ ++ def test_html_tags_have_namespace(self): ++ markup = "" ++ soup = self.soup(markup) ++ assert "http://www.w3.org/1999/xhtml" == soup.a.namespace ++ ++ def test_svg_tags_have_namespace(self): ++ markup = '' ++ soup = self.soup(markup) ++ namespace = "http://www.w3.org/2000/svg" ++ assert namespace == soup.svg.namespace ++ assert namespace == soup.circle.namespace ++ ++ ++ def test_mathml_tags_have_namespace(self): ++ markup = '5' ++ soup = self.soup(markup) ++ namespace = 'http://www.w3.org/1998/Math/MathML' ++ assert namespace == soup.math.namespace ++ assert namespace == soup.msqrt.namespace ++ ++ def test_xml_declaration_becomes_comment(self): ++ markup = '' ++ soup = self.soup(markup) ++ assert isinstance(soup.contents[0], Comment) ++ assert soup.contents[0] == '?xml version="1.0" encoding="utf-8"?' ++ assert "html" == soup.contents[0].next_element.name ++ ++def skipIf(condition, reason): ++ def nothing(test, *args, **kwargs): ++ return None ++ ++ def decorator(test_item): ++ if condition: ++ return nothing ++ else: ++ return test_item ++ ++ return decorator +diff --git a/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_builder.py b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_builder.py +new file mode 100644 +index 000000000..75370712a +--- /dev/null ++++ b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_builder.py +@@ -0,0 +1,29 @@ ++import pytest ++from unittest.mock import patch ++from bs4.builder import DetectsXMLParsedAsHTML ++ ++class TestDetectsXMLParsedAsHTML(object): ++ ++ @pytest.mark.parametrize( ++ "markup,looks_like_xml", ++ [("No xml declaration", False), ++ ("obviously HTMLActually XHTML", False), ++ (" < html>Tricky XHTML", False), ++ ("", True), ++ ] ++ ) ++ def test_warn_if_markup_looks_like_xml(self, markup, looks_like_xml): ++ # Test of our ability to guess at whether markup looks XML-ish ++ # _and_ not HTML-ish. 
++ with patch('bs4.builder.DetectsXMLParsedAsHTML._warn') as mock: ++ for data in markup, markup.encode('utf8'): ++ result = DetectsXMLParsedAsHTML.warn_if_markup_looks_like_xml( ++ data ++ ) ++ assert result == looks_like_xml ++ if looks_like_xml: ++ assert mock.called ++ else: ++ assert not mock.called ++ mock.reset_mock() +diff --git a/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_builder_registry.py b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_builder_registry.py +index 92ad10fb0..5fa874c80 100644 +--- a/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_builder_registry.py ++++ b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_builder_registry.py +@@ -1,6 +1,7 @@ + """Tests of the builder registry.""" + +-import unittest ++import pytest ++import warnings + + from bs4 import BeautifulSoup + from bs4.builder import ( +@@ -25,62 +26,58 @@ except ImportError: + LXML_PRESENT = False + + +-class BuiltInRegistryTest(unittest.TestCase): ++class TestBuiltInRegistry(object): + """Test the built-in registry with the default builders registered.""" + + def test_combination(self): ++ assert registry.lookup('strict', 'html') == HTMLParserTreeBuilder + if LXML_PRESENT: +- self.assertEqual(registry.lookup('fast', 'html'), +- LXMLTreeBuilder) +- +- if LXML_PRESENT: +- self.assertEqual(registry.lookup('permissive', 'xml'), +- LXMLTreeBuilderForXML) +- self.assertEqual(registry.lookup('strict', 'html'), +- HTMLParserTreeBuilder) ++ assert registry.lookup('fast', 'html') == LXMLTreeBuilder ++ assert registry.lookup('permissive', 'xml') == LXMLTreeBuilderForXML + if HTML5LIB_PRESENT: +- self.assertEqual(registry.lookup('html5lib', 'html'), +- HTML5TreeBuilder) ++ assert registry.lookup('html5lib', 'html') == HTML5TreeBuilder + + def test_lookup_by_markup_type(self): + if LXML_PRESENT: +- self.assertEqual(registry.lookup('html'), 
LXMLTreeBuilder) +- self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML) ++ assert registry.lookup('html') == LXMLTreeBuilder ++ assert registry.lookup('xml') == LXMLTreeBuilderForXML + else: +- self.assertEqual(registry.lookup('xml'), None) ++ assert registry.lookup('xml') == None + if HTML5LIB_PRESENT: +- self.assertEqual(registry.lookup('html'), HTML5TreeBuilder) ++ assert registry.lookup('html') == HTML5TreeBuilder + else: +- self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder) ++ assert registry.lookup('html') == HTMLParserTreeBuilder + + def test_named_library(self): + if LXML_PRESENT: +- self.assertEqual(registry.lookup('lxml', 'xml'), +- LXMLTreeBuilderForXML) +- self.assertEqual(registry.lookup('lxml', 'html'), +- LXMLTreeBuilder) ++ assert registry.lookup('lxml', 'xml') == LXMLTreeBuilderForXML ++ assert registry.lookup('lxml', 'html') == LXMLTreeBuilder + if HTML5LIB_PRESENT: +- self.assertEqual(registry.lookup('html5lib'), +- HTML5TreeBuilder) ++ assert registry.lookup('html5lib') == HTML5TreeBuilder + +- self.assertEqual(registry.lookup('html.parser'), +- HTMLParserTreeBuilder) ++ assert registry.lookup('html.parser') == HTMLParserTreeBuilder + + def test_beautifulsoup_constructor_does_lookup(self): +- # You can pass in a string. +- BeautifulSoup("", features="html") +- # Or a list of strings. +- BeautifulSoup("", features=["html", "fast"]) + ++ with warnings.catch_warnings(record=True) as w: ++ # This will create a warning about not explicitly ++ # specifying a parser, but we'll ignore it. ++ ++ # You can pass in a string. ++ BeautifulSoup("", features="html") ++ # Or a list of strings. ++ BeautifulSoup("", features=["html", "fast"]) ++ pass ++ + # You'll get an exception if BS can't find an appropriate + # builder. 
+- self.assertRaises(ValueError, BeautifulSoup, +- "", features="no-such-feature") ++ with pytest.raises(ValueError): ++ BeautifulSoup("", features="no-such-feature") + +-class RegistryTest(unittest.TestCase): ++class TestRegistry(object): + """Test the TreeBuilderRegistry class in general.""" + +- def setUp(self): ++ def setup_method(self): + self.registry = TreeBuilderRegistry() + + def builder_for_features(self, *feature_list): +@@ -95,28 +92,28 @@ class RegistryTest(unittest.TestCase): + + # Since the builder advertises no features, you can't find it + # by looking up features. +- self.assertEqual(self.registry.lookup('foo'), None) ++ assert self.registry.lookup('foo') is None + + # But you can find it by doing a lookup with no features, if + # this happens to be the only registered builder. +- self.assertEqual(self.registry.lookup(), builder) ++ assert self.registry.lookup() == builder + + def test_register_with_features_makes_lookup_succeed(self): + builder = self.builder_for_features('foo', 'bar') +- self.assertEqual(self.registry.lookup('foo'), builder) +- self.assertEqual(self.registry.lookup('bar'), builder) ++ assert self.registry.lookup('foo') is builder ++ assert self.registry.lookup('bar') is builder + + def test_lookup_fails_when_no_builder_implements_feature(self): + builder = self.builder_for_features('foo', 'bar') +- self.assertEqual(self.registry.lookup('baz'), None) ++ assert self.registry.lookup('baz') is None + + def test_lookup_gets_most_recent_registration_when_no_feature_specified(self): + builder1 = self.builder_for_features('foo') + builder2 = self.builder_for_features('bar') +- self.assertEqual(self.registry.lookup(), builder2) ++ assert self.registry.lookup() == builder2 + + def test_lookup_fails_when_no_tree_builders_registered(self): +- self.assertEqual(self.registry.lookup(), None) ++ assert self.registry.lookup() is None + + def test_lookup_gets_most_recent_builder_supporting_all_features(self): + has_one = 
self.builder_for_features('foo') +@@ -128,14 +125,12 @@ class RegistryTest(unittest.TestCase): + + # There are two builders featuring 'foo' and 'bar', but + # the one that also features 'quux' was registered later. +- self.assertEqual(self.registry.lookup('foo', 'bar'), +- has_both_late) ++ assert self.registry.lookup('foo', 'bar') == has_both_late + + # There is only one builder featuring 'foo', 'bar', and 'baz'. +- self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'), +- has_both_early) ++ assert self.registry.lookup('foo', 'bar', 'baz') == has_both_early + + def test_lookup_fails_when_cannot_reconcile_requested_features(self): + builder1 = self.builder_for_features('foo', 'bar') + builder2 = self.builder_for_features('foo', 'baz') +- self.assertEqual(self.registry.lookup('bar', 'baz'), None) ++ assert self.registry.lookup('bar', 'baz') is None +diff --git a/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_dammit.py b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_dammit.py +new file mode 100644 +index 000000000..9971234e3 +--- /dev/null ++++ b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_dammit.py +@@ -0,0 +1,371 @@ ++# encoding: utf-8 ++import pytest ++import logging ++import bs4 ++from bs4 import BeautifulSoup ++from bs4.dammit import ( ++ EntitySubstitution, ++ EncodingDetector, ++ UnicodeDammit, ++) ++ ++class TestUnicodeDammit(object): ++ """Standalone tests of UnicodeDammit.""" ++ ++ def test_unicode_input(self): ++ markup = "I'm already Unicode! 
\N{SNOWMAN}" ++ dammit = UnicodeDammit(markup) ++ assert dammit.unicode_markup == markup ++ ++ def test_smart_quotes_to_unicode(self): ++ markup = b"\x91\x92\x93\x94" ++ dammit = UnicodeDammit(markup) ++ assert dammit.unicode_markup == "\u2018\u2019\u201c\u201d" ++ ++ def test_smart_quotes_to_xml_entities(self): ++ markup = b"\x91\x92\x93\x94" ++ dammit = UnicodeDammit(markup, smart_quotes_to="xml") ++ assert dammit.unicode_markup == "‘’“”" ++ ++ def test_smart_quotes_to_html_entities(self): ++ markup = b"\x91\x92\x93\x94" ++ dammit = UnicodeDammit(markup, smart_quotes_to="html") ++ assert dammit.unicode_markup == "‘’“”" ++ ++ def test_smart_quotes_to_ascii(self): ++ markup = b"\x91\x92\x93\x94" ++ dammit = UnicodeDammit(markup, smart_quotes_to="ascii") ++ assert dammit.unicode_markup == """''""""" ++ ++ def test_detect_utf8(self): ++ utf8 = b"Sacr\xc3\xa9 bleu! \xe2\x98\x83" ++ dammit = UnicodeDammit(utf8) ++ assert dammit.original_encoding.lower() == 'utf-8' ++ assert dammit.unicode_markup == 'Sacr\xe9 bleu! 
\N{SNOWMAN}' ++ ++ def test_convert_hebrew(self): ++ hebrew = b"\xed\xe5\xec\xf9" ++ dammit = UnicodeDammit(hebrew, ["iso-8859-8"]) ++ assert dammit.original_encoding.lower() == 'iso-8859-8' ++ assert dammit.unicode_markup == '\u05dd\u05d5\u05dc\u05e9' ++ ++ def test_dont_see_smart_quotes_where_there_are_none(self): ++ utf_8 = b"\343\202\261\343\203\274\343\202\277\343\202\244 Watch" ++ dammit = UnicodeDammit(utf_8) ++ assert dammit.original_encoding.lower() == 'utf-8' ++ assert dammit.unicode_markup.encode("utf-8") == utf_8 ++ ++ def test_ignore_inappropriate_codecs(self): ++ utf8_data = "Räksmörgås".encode("utf-8") ++ dammit = UnicodeDammit(utf8_data, ["iso-8859-8"]) ++ assert dammit.original_encoding.lower() == 'utf-8' ++ ++ def test_ignore_invalid_codecs(self): ++ utf8_data = "Räksmörgås".encode("utf-8") ++ for bad_encoding in ['.utf8', '...', 'utF---16.!']: ++ dammit = UnicodeDammit(utf8_data, [bad_encoding]) ++ assert dammit.original_encoding.lower() == 'utf-8' ++ ++ def test_exclude_encodings(self): ++ # This is UTF-8. ++ utf8_data = "Räksmörgås".encode("utf-8") ++ ++ # But if we exclude UTF-8 from consideration, the guess is ++ # Windows-1252. ++ dammit = UnicodeDammit(utf8_data, exclude_encodings=["utf-8"]) ++ assert dammit.original_encoding.lower() == 'windows-1252' ++ ++ # And if we exclude that, there is no valid guess at all. 
++ dammit = UnicodeDammit( ++ utf8_data, exclude_encodings=["utf-8", "windows-1252"]) ++ assert dammit.original_encoding == None ++ ++class TestEncodingDetector(object): ++ ++ def test_encoding_detector_replaces_junk_in_encoding_name_with_replacement_character(self): ++ detected = EncodingDetector( ++ b'') ++ encodings = list(detected.encodings) ++ assert 'utf-\N{REPLACEMENT CHARACTER}' in encodings ++ ++ def test_detect_html5_style_meta_tag(self): ++ ++ for data in ( ++ b'', ++ b"", ++ b"", ++ b""): ++ dammit = UnicodeDammit(data, is_html=True) ++ assert "euc-jp" == dammit.original_encoding ++ ++ def test_last_ditch_entity_replacement(self): ++ # This is a UTF-8 document that contains bytestrings ++ # completely incompatible with UTF-8 (ie. encoded with some other ++ # encoding). ++ # ++ # Since there is no consistent encoding for the document, ++ # Unicode, Dammit will eventually encode the document as UTF-8 ++ # and encode the incompatible characters as REPLACEMENT ++ # CHARACTER. ++ # ++ # If chardet is installed, it will detect that the document ++ # can be converted into ISO-8859-1 without errors. This happens ++ # to be the wrong encoding, but it is a consistent encoding, so the ++ # code we're testing here won't run. ++ # ++ # So we temporarily disable chardet if it's present. ++ doc = b"""\357\273\277 ++\330\250\330\252\330\261 ++\310\322\321\220\312\321\355\344""" ++ chardet = bs4.dammit.chardet_dammit ++ logging.disable(logging.WARNING) ++ try: ++ def noop(str): ++ return None ++ bs4.dammit.chardet_dammit = noop ++ dammit = UnicodeDammit(doc) ++ assert True == dammit.contains_replacement_characters ++ assert "\ufffd" in dammit.unicode_markup ++ ++ soup = BeautifulSoup(doc, "html.parser") ++ assert soup.contains_replacement_characters ++ finally: ++ logging.disable(logging.NOTSET) ++ bs4.dammit.chardet_dammit = chardet ++ ++ def test_byte_order_mark_removed(self): ++ # A document written in UTF-16LE will have its byte order marker stripped. 
++ data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00' ++ dammit = UnicodeDammit(data) ++ assert "áé" == dammit.unicode_markup ++ assert "utf-16le" == dammit.original_encoding ++ ++ def test_known_definite_versus_user_encodings(self): ++ # The known_definite_encodings are used before sniffing the ++ # byte-order mark; the user_encodings are used afterwards. ++ ++ # Here's a document in UTF-16LE. ++ data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00' ++ dammit = UnicodeDammit(data) ++ ++ # We can process it as UTF-16 by passing it in as a known ++ # definite encoding. ++ before = UnicodeDammit(data, known_definite_encodings=["utf-16"]) ++ assert "utf-16" == before.original_encoding ++ ++ # If we pass UTF-18 as a user encoding, it's not even ++ # tried--the encoding sniffed from the byte-order mark takes ++ # precedence. ++ after = UnicodeDammit(data, user_encodings=["utf-8"]) ++ assert "utf-16le" == after.original_encoding ++ assert ["utf-16le"] == [x[0] for x in dammit.tried_encodings] ++ ++ # Here's a document in ISO-8859-8. ++ hebrew = b"\xed\xe5\xec\xf9" ++ dammit = UnicodeDammit(hebrew, known_definite_encodings=["utf-8"], ++ user_encodings=["iso-8859-8"]) ++ ++ # The known_definite_encodings don't work, BOM sniffing does ++ # nothing (it only works for a few UTF encodings), but one of ++ # the user_encodings does work. ++ assert "iso-8859-8" == dammit.original_encoding ++ assert ["utf-8", "iso-8859-8"] == [x[0] for x in dammit.tried_encodings] ++ ++ def test_deprecated_override_encodings(self): ++ # override_encodings is a deprecated alias for ++ # known_definite_encodings. ++ hebrew = b"\xed\xe5\xec\xf9" ++ dammit = UnicodeDammit( ++ hebrew, ++ known_definite_encodings=["shift-jis"], ++ override_encodings=["utf-8"], ++ user_encodings=["iso-8859-8"], ++ ) ++ assert "iso-8859-8" == dammit.original_encoding ++ ++ # known_definite_encodings and override_encodings were tried ++ # before user_encodings. 
++ assert ["shift-jis", "utf-8", "iso-8859-8"] == ( ++ [x[0] for x in dammit.tried_encodings] ++ ) ++ ++ def test_detwingle(self): ++ # Here's a UTF8 document. ++ utf8 = ("\N{SNOWMAN}" * 3).encode("utf8") ++ ++ # Here's a Windows-1252 document. ++ windows_1252 = ( ++ "\N{LEFT DOUBLE QUOTATION MARK}Hi, I like Windows!" ++ "\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252") ++ ++ # Through some unholy alchemy, they've been stuck together. ++ doc = utf8 + windows_1252 + utf8 ++ ++ # The document can't be turned into UTF-8: ++ with pytest.raises(UnicodeDecodeError): ++ doc.decode("utf8") ++ ++ # Unicode, Dammit thinks the whole document is Windows-1252, ++ # and decodes it into "☃☃☃“Hi, I like Windows!”☃☃☃" ++ ++ # But if we run it through fix_embedded_windows_1252, it's fixed: ++ fixed = UnicodeDammit.detwingle(doc) ++ assert "☃☃☃“Hi, I like Windows!”☃☃☃" == fixed.decode("utf8") ++ ++ def test_detwingle_ignores_multibyte_characters(self): ++ # Each of these characters has a UTF-8 representation ending ++ # in \x93. \x93 is a smart quote if interpreted as ++ # Windows-1252. But our code knows to skip over multibyte ++ # UTF-8 characters, so they'll survive the process unscathed. ++ for tricky_unicode_char in ( ++ "\N{LATIN SMALL LIGATURE OE}", # 2-byte char '\xc5\x93' ++ "\N{LATIN SUBSCRIPT SMALL LETTER X}", # 3-byte char '\xe2\x82\x93' ++ "\xf0\x90\x90\x93", # This is a CJK character, not sure which one. ++ ): ++ input = tricky_unicode_char.encode("utf8") ++ assert input.endswith(b'\x93') ++ output = UnicodeDammit.detwingle(input) ++ assert output == input ++ ++ def test_find_declared_encoding(self): ++ # Test our ability to find a declared encoding inside an ++ # XML or HTML document. ++ # ++ # Even if the document comes in as Unicode, it may be ++ # interesting to know what encoding was claimed ++ # originally. 
++ ++ html_unicode = '' ++ html_bytes = html_unicode.encode("ascii") ++ ++ xml_unicode= '' ++ xml_bytes = xml_unicode.encode("ascii") ++ ++ m = EncodingDetector.find_declared_encoding ++ assert m(html_unicode, is_html=False) is None ++ assert "utf-8" == m(html_unicode, is_html=True) ++ assert "utf-8" == m(html_bytes, is_html=True) ++ ++ assert "iso-8859-1" == m(xml_unicode) ++ assert "iso-8859-1" == m(xml_bytes) ++ ++ # Normally, only the first few kilobytes of a document are checked for ++ # an encoding. ++ spacer = b' ' * 5000 ++ assert m(spacer + html_bytes) is None ++ assert m(spacer + xml_bytes) is None ++ ++ # But you can tell find_declared_encoding to search an entire ++ # HTML document. ++ assert ( ++ m(spacer + html_bytes, is_html=True, search_entire_document=True) ++ == "utf-8" ++ ) ++ ++ # The XML encoding declaration has to be the very first thing ++ # in the document. We'll allow whitespace before the document ++ # starts, but nothing else. ++ assert m(xml_bytes, search_entire_document=True) == "iso-8859-1" ++ assert m(b' ' + xml_bytes, search_entire_document=True) == "iso-8859-1" ++ assert m(b'a' + xml_bytes, search_entire_document=True) is None ++ ++ ++class TestEntitySubstitution(object): ++ """Standalone tests of the EntitySubstitution class.""" ++ def setup_method(self): ++ self.sub = EntitySubstitution ++ ++ def test_simple_html_substitution(self): ++ # Unicode characters corresponding to named HTML entites ++ # are substituted, and no others. ++ s = "foo\u2200\N{SNOWMAN}\u00f5bar" ++ assert self.sub.substitute_html(s) == "foo∀\N{SNOWMAN}õbar" ++ ++ def test_smart_quote_substitution(self): ++ # MS smart quotes are a common source of frustration, so we ++ # give them a special test. ++ quotes = b"\x91\x92foo\x93\x94" ++ dammit = UnicodeDammit(quotes) ++ assert self.sub.substitute_html(dammit.markup) == "‘’foo“”" ++ ++ def test_html5_entity(self): ++ # Some HTML5 entities correspond to single- or multi-character ++ # Unicode sequences. 
++ ++ for entity, u in ( ++ # A few spot checks of our ability to recognize ++ # special character sequences and convert them ++ # to named entities. ++ ('⊧', '\u22a7'), ++ ('𝔑', '\U0001d511'), ++ ('≧̸', '\u2267\u0338'), ++ ('¬', '\xac'), ++ ('⫬', '\u2aec'), ++ ++ # We _could_ convert | to &verbarr;, but we don't, because ++ # | is an ASCII character. ++ ('|' '|'), ++ ++ # Similarly for the fj ligature, which we could convert to ++ # fj, but we don't. ++ ("fj", "fj"), ++ ++ # We do convert _these_ ASCII characters to HTML entities, ++ # because that's required to generate valid HTML. ++ ('>', '>'), ++ ('<', '<'), ++ ('&', '&'), ++ ): ++ template = '3 %s 4' ++ raw = template % u ++ with_entities = template % entity ++ assert self.sub.substitute_html(raw) == with_entities ++ ++ def test_html5_entity_with_variation_selector(self): ++ # Some HTML5 entities correspond either to a single-character ++ # Unicode sequence _or_ to the same character plus U+FE00, ++ # VARIATION SELECTOR 1. We can handle this. 
++ data = "fjords \u2294 penguins" ++ markup = "fjords ⊔ penguins" ++ assert self.sub.substitute_html(data) == markup ++ ++ data = "fjords \u2294\ufe00 penguins" ++ markup = "fjords ⊔︀ penguins" ++ assert self.sub.substitute_html(data) == markup ++ ++ def test_xml_converstion_includes_no_quotes_if_make_quoted_attribute_is_false(self): ++ s = 'Welcome to "my bar"' ++ assert self.sub.substitute_xml(s, False) == s ++ ++ def test_xml_attribute_quoting_normally_uses_double_quotes(self): ++ assert self.sub.substitute_xml("Welcome", True) == '"Welcome"' ++ assert self.sub.substitute_xml("Bob's Bar", True) == '"Bob\'s Bar"' ++ ++ def test_xml_attribute_quoting_uses_single_quotes_when_value_contains_double_quotes(self): ++ s = 'Welcome to "my bar"' ++ assert self.sub.substitute_xml(s, True) == "'Welcome to \"my bar\"'" ++ ++ def test_xml_attribute_quoting_escapes_single_quotes_when_value_contains_both_single_and_double_quotes(self): ++ s = 'Welcome to "Bob\'s Bar"' ++ assert self.sub.substitute_xml(s, True) == '"Welcome to "Bob\'s Bar""' ++ ++ def test_xml_quotes_arent_escaped_when_value_is_not_being_quoted(self): ++ quoted = 'Welcome to "Bob\'s Bar"' ++ assert self.sub.substitute_xml(quoted) == quoted ++ ++ def test_xml_quoting_handles_angle_brackets(self): ++ assert self.sub.substitute_xml("foo") == "foo<bar>" ++ ++ def test_xml_quoting_handles_ampersands(self): ++ assert self.sub.substitute_xml("AT&T") == "AT&T" ++ ++ def test_xml_quoting_including_ampersands_when_they_are_part_of_an_entity(self): ++ assert self.sub.substitute_xml("ÁT&T") == "&Aacute;T&T" ++ ++ def test_xml_quoting_ignoring_ampersands_when_they_are_part_of_an_entity(self): ++ assert self.sub.substitute_xml_containing_entities("ÁT&T") == "ÁT&T" ++ ++ def test_quotes_not_html_substituted(self): ++ """There's no need to do this except inside attribute values.""" ++ text = 'Bob\'s "bar"' ++ assert self.sub.substitute_html(text) == text +diff --git 
a/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_docs.py b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_docs.py +index 5b9f67709..0194d6973 100644 +--- a/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_docs.py ++++ b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_docs.py +@@ -1,5 +1,7 @@ + "Test harness for doctests." + ++# TODO: Pretty sure this isn't used and should be deleted. ++ + # pylint: disable-msg=E0611,W0142 + + __metaclass__ = type +diff --git a/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_element.py b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_element.py +new file mode 100644 +index 000000000..6d08ab5d0 +--- /dev/null ++++ b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_element.py +@@ -0,0 +1,74 @@ ++"""Tests of classes in element.py. ++ ++The really big classes -- Tag, PageElement, and NavigableString -- ++are tested in separate files. ++""" ++ ++from bs4.element import ( ++ CharsetMetaAttributeValue, ++ ContentMetaAttributeValue, ++ NamespacedAttribute, ++) ++from . 
import SoupTest ++ ++ ++class TestNamedspacedAttribute(object): ++ ++ def test_name_may_be_none_or_missing(self): ++ a = NamespacedAttribute("xmlns", None) ++ assert a == "xmlns" ++ ++ a = NamespacedAttribute("xmlns", "") ++ assert a == "xmlns" ++ ++ a = NamespacedAttribute("xmlns") ++ assert a == "xmlns" ++ ++ def test_namespace_may_be_none_or_missing(self): ++ a = NamespacedAttribute(None, "tag") ++ assert a == "tag" ++ ++ a = NamespacedAttribute("", "tag") ++ assert a == "tag" ++ ++ def test_attribute_is_equivalent_to_colon_separated_string(self): ++ a = NamespacedAttribute("a", "b") ++ assert "a:b" == a ++ ++ def test_attributes_are_equivalent_if_prefix_and_name_identical(self): ++ a = NamespacedAttribute("a", "b", "c") ++ b = NamespacedAttribute("a", "b", "c") ++ assert a == b ++ ++ # The actual namespace is not considered. ++ c = NamespacedAttribute("a", "b", None) ++ assert a == c ++ ++ # But name and prefix are important. ++ d = NamespacedAttribute("a", "z", "c") ++ assert a != d ++ ++ e = NamespacedAttribute("z", "b", "c") ++ assert a != e ++ ++ ++class TestAttributeValueWithCharsetSubstitution(object): ++ """Certain attributes are designed to have the charset of the ++ final document substituted into their value. ++ """ ++ ++ def test_content_meta_attribute_value(self): ++ # The value of a CharsetMetaAttributeValue is whatever ++ # encoding the string is in. 
++ value = CharsetMetaAttributeValue("euc-jp") ++ assert "euc-jp" == value ++ assert "euc-jp" == value.original_value ++ assert "utf8" == value.encode("utf8") ++ assert "ascii" == value.encode("ascii") ++ ++ def test_content_meta_attribute_value(self): ++ value = ContentMetaAttributeValue("text/html; charset=euc-jp") ++ assert "text/html; charset=euc-jp" == value ++ assert "text/html; charset=euc-jp" == value.original_value ++ assert "text/html; charset=utf8" == value.encode("utf8") ++ assert "text/html; charset=ascii" == value.encode("ascii") +diff --git a/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_formatter.py b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_formatter.py +new file mode 100644 +index 000000000..84d4e3b2e +--- /dev/null ++++ b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_formatter.py +@@ -0,0 +1,113 @@ ++import pytest ++ ++from bs4.element import Tag ++from bs4.formatter import ( ++ Formatter, ++ HTMLFormatter, ++ XMLFormatter, ++) ++from . import SoupTest ++ ++class TestFormatter(SoupTest): ++ ++ def test_default_attributes(self): ++ # Test the default behavior of Formatter.attributes(). ++ formatter = Formatter() ++ tag = Tag(name="tag") ++ tag['b'] = 1 ++ tag['a'] = 2 ++ ++ # Attributes come out sorted by name. In Python 3, attributes ++ # normally come out of a dictionary in the order they were ++ # added. ++ assert [('a', 2), ('b', 1)] == formatter.attributes(tag) ++ ++ # This works even if Tag.attrs is None, though this shouldn't ++ # normally happen. ++ tag.attrs = None ++ assert [] == formatter.attributes(tag) ++ ++ assert ' ' == formatter.indent ++ ++ def test_sort_attributes(self): ++ # Test the ability to override Formatter.attributes() to, ++ # e.g., disable the normal sorting of attributes. 
++ class UnsortedFormatter(Formatter): ++ def attributes(self, tag): ++ self.called_with = tag ++ for k, v in sorted(tag.attrs.items()): ++ if k == 'ignore': ++ continue ++ yield k,v ++ ++ soup = self.soup('

') ++ formatter = UnsortedFormatter() ++ decoded = soup.decode(formatter=formatter) ++ ++ # attributes() was called on the

tag. It filtered out one ++ # attribute and sorted the other two. ++ assert formatter.called_with == soup.p ++ assert '

' == decoded ++ ++ def test_empty_attributes_are_booleans(self): ++ # Test the behavior of empty_attributes_are_booleans as well ++ # as which Formatters have it enabled. ++ ++ for name in ('html', 'minimal', None): ++ formatter = HTMLFormatter.REGISTRY[name] ++ assert False == formatter.empty_attributes_are_booleans ++ ++ formatter = XMLFormatter.REGISTRY[None] ++ assert False == formatter.empty_attributes_are_booleans ++ ++ formatter = HTMLFormatter.REGISTRY['html5'] ++ assert True == formatter.empty_attributes_are_booleans ++ ++ # Verify that the constructor sets the value. ++ formatter = Formatter(empty_attributes_are_booleans=True) ++ assert True == formatter.empty_attributes_are_booleans ++ ++ # Now demonstrate what it does to markup. ++ for markup in ( ++ "", ++ '' ++ ): ++ soup = self.soup(markup) ++ for formatter in ('html', 'minimal', 'xml', None): ++ assert b'' == soup.option.encode(formatter='html') ++ assert b'' == soup.option.encode(formatter='html5') ++ ++ @pytest.mark.parametrize( ++ "indent,expect", ++ [ ++ (None, '\n\ntext\n\n'), ++ (-1, '\n\ntext\n\n'), ++ (0, '\n\ntext\n\n'), ++ ("", '\n\ntext\n\n'), ++ ++ (1, '\n \n text\n \n'), ++ (2, '\n \n text\n \n'), ++ ++ ("\t", '\n\t\n\t\ttext\n\t\n'), ++ ('abc', '\nabc\nabcabctext\nabc\n'), ++ ++ # Some invalid inputs -- the default behavior is used. ++ (object(), '\n \n text\n \n'), ++ (b'bytes', '\n \n text\n \n'), ++ ] ++ ) ++ def test_indent(self, indent, expect): ++ # Pretty-print a tree with a Formatter set to ++ # indent in a certain way and verify the results. ++ soup = self.soup("text") ++ formatter = Formatter(indent=indent) ++ assert soup.prettify(formatter=formatter) == expect ++ ++ # Pretty-printing only happens with prettify(), not ++ # encode(). 
++ assert soup.encode(formatter=formatter) != expect ++ ++ def test_default_indent_value(self): ++ formatter = Formatter() ++ assert formatter.indent == ' ' ++ +diff --git a/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_html5lib.py b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_html5lib.py +index 594c3e1f2..b32ab3042 100644 +--- a/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_html5lib.py ++++ b/src/3rdparty/chromium/third_party/catapult/third_party/beautifulsoup4/bs4/tests/test_html5lib.py +@@ -5,10 +5,10 @@ import warnings + try: + from bs4.builder import HTML5TreeBuilder + HTML5LIB_PRESENT = True +-except ImportError, e: ++except ImportError as e: + HTML5LIB_PRESENT = False + from bs4.element import SoupStrainer +-from bs4.testing import ( ++from . import ( + HTML5TreeBuilderSmokeTest, + SoupTest, + skipIf, +@@ -17,12 +17,12 @@ from bs4.testing import ( + @skipIf( + not HTML5LIB_PRESENT, + "html5lib seems not to be present, not testing its tree builder.") +-class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest): ++class TestHTML5LibBuilder(SoupTest, HTML5TreeBuilderSmokeTest): + """See ``HTML5TreeBuilderSmokeTest``.""" + + @property + def default_builder(self): +- return HTML5TreeBuilder() ++ return HTML5TreeBuilder + + def test_soupstrainer(self): + # The html5lib tree builder does not support SoupStrainers. +@@ -30,12 +30,9 @@ class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest): + markup = "

A bold statement.

" + with warnings.catch_warnings(record=True) as w: + soup = self.soup(markup, parse_only=strainer) +- self.assertEqual( +- soup.decode(), self.document_for(markup)) ++ assert soup.decode() == self.document_for(markup) + +- self.assertTrue( +- "the html5lib tree builder doesn't support parse_only" in +- str(w[0].message)) ++ assert "the html5lib tree builder doesn't support parse_only" in str(w[0].message) + + def test_correctly_nested_tables(self): + """html5lib inserts tags where other parsers don't.""" +@@ -46,13 +43,13 @@ class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest): + 'foo' + '') + +- self.assertSoupEquals( ++ self.assert_soup( + markup, + '
Here\'s another table:' + '
foo
' + '
') + +- self.assertSoupEquals( ++ self.assert_soup( + "" + "" + "
Foo
Bar
Baz
") +@@ -69,17 +66,158 @@ class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest): + ''' + soup = self.soup(markup) + # Verify that we can reach the

tag; this means the tree is connected. +- self.assertEqual(b"

foo

", soup.p.encode()) ++ assert b"

foo

" == soup.p.encode() + + def test_reparented_markup(self): + markup = '

foo

\n

bar

' + soup = self.soup(markup) +- self.assertEqual(u"

foo

\n

bar

", soup.body.decode()) +- self.assertEqual(2, len(soup.find_all('p'))) ++ assert "

foo

\n

bar

" == soup.body.decode() ++ assert 2 == len(soup.find_all('p')) + + + def test_reparented_markup_ends_with_whitespace(self): + markup = '

foo

\n

bar

\n' + soup = self.soup(markup) +- self.assertEqual(u"

foo

\n

bar

\n", soup.body.decode()) +- self.assertEqual(2, len(soup.find_all('p'))) ++ assert "

foo

\n

bar

\n" == soup.body.decode() ++ assert 2 == len(soup.find_all('p')) ++ ++ def test_reparented_markup_containing_identical_whitespace_nodes(self): ++ """Verify that we keep the two whitespace nodes in this ++ document distinct when reparenting the adjacent tags. ++ """ ++ markup = '
' ++ soup = self.soup(markup) ++ space1, space2 = soup.find_all(string=' ') ++ tbody1, tbody2 = soup.find_all('tbody') ++ assert space1.next_element is tbody1 ++ assert tbody2.next_element is space2 ++ ++ def test_reparented_markup_containing_children(self): ++ markup = '' ++ soup = self.soup(markup) ++ noscript = soup.noscript ++ assert "target" == noscript.next_element ++ target = soup.find(string='target') ++ ++ # The 'aftermath' string was duplicated; we want the second one. ++ final_aftermath = soup.find_all(string='aftermath')[-1] ++ ++ # The